diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000000..cef5b1c29c --- /dev/null +++ b/.coveragerc @@ -0,0 +1,26 @@ +# .coveragerc to control coverage.py +[run] +branch = True + +[report] +# Regexes for lines to exclude from consideration +exclude_lines = + # Have to re-enable the standard pragma + pragma: no cover + + # Don't complain about missing debug-only code: + def __repr__ + if self\.debug + + # Don't complain if tests don't hit defensive assertion code: + raise AssertionError + raise NotImplementedError + + # Don't complain if non-runnable code isn't run: + if 0: + if __name__ == .__main__.: + +ignore_errors = True + +[html] +directory = coverage_html_report diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml index fffb5264e5..94dd61e29b 100644 --- a/.github/workflows/unit_tests.yml +++ b/.github/workflows/unit_tests.yml @@ -39,7 +39,7 @@ jobs: python: 3.8 fail-fast: false steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 - name: set up Python uses: actions/setup-python@v1 @@ -49,6 +49,14 @@ jobs: - name: install OS & Python packages run: | + # use apt-spy2 to select closest apt mirror, + # which helps avoid connectivity issues in Azure; + # see https://github.com/actions/virtual-environments/issues/675 + sudo gem install apt-spy2 + sudo apt-spy2 check + sudo apt-spy2 fix --commit + # after selecting a specific mirror, we need to run 'apt-get update' + sudo apt-get update # for modules tool sudo apt-get install lua5.2 liblua5.2-dev lua-filesystem lua-posix tcl tcl-dev # fix for lua-posix packaging issue, see https://bugs.launchpad.net/ubuntu/+source/lua-posix/+bug/1752082 @@ -153,19 +161,15 @@ jobs: EB_BOOTSTRAP_VERSION=$(grep '^EB_BOOTSTRAP_VERSION' easybuild/scripts/bootstrap_eb.py | sed 's/[^0-9.]//g') EB_BOOTSTRAP_SHA256SUM=$(sha256sum easybuild/scripts/bootstrap_eb.py | cut -f1 -d' ') EB_BOOTSTRAP_FOUND="$EB_BOOTSTRAP_VERSION $EB_BOOTSTRAP_SHA256SUM" - EB_BOOTSTRAP_EXPECTED="20190922.01 7927513e7448d886decfb1bb5daf840e85dc7367f57cc75e51b68f21fe109d53" + EB_BOOTSTRAP_EXPECTED="20200203.01 616bf3ce812c0844bf9ea3e690f9d88b394ed48f834ddb8424a73cf45fc64ea5" test "$EB_BOOTSTRAP_FOUND" = "$EB_BOOTSTRAP_EXPECTED" || (echo "Version check on bootstrap script failed $EB_BOOTSTRAP_FOUND" && exit 1) - # test bootstrap script (only compatible with Python 2 for now) - if [[ ${{matrix.python}} =~ '2.' 
]]; then - export PREFIX=/tmp/$USER/$GITHUB_SHA/eb_bootstrap - python easybuild/scripts/bootstrap_eb.py $PREFIX - # unset $PYTHONPATH to avoid mixing two EasyBuild 'installations' when testing bootstrapped EasyBuild module - unset PYTHONPATH - # simple sanity check on bootstrapped EasyBuild module (skip when testing with Python 3, for now) - module use $PREFIX/modules/all - module load EasyBuild - eb --version - else - echo "Testing of bootstrap script skipped when testing with Python ${{matrix.python}}" - fi + # test bootstrap script + export PREFIX=/tmp/$USER/$GITHUB_SHA/eb_bootstrap + python easybuild/scripts/bootstrap_eb.py $PREFIX + # unset $PYTHONPATH to avoid mixing two EasyBuild 'installations' when testing bootstrapped EasyBuild module + unset PYTHONPATH + # simple sanity check on bootstrapped EasyBuild module + module use $PREFIX/modules/all + module load EasyBuild + eb --version diff --git a/.gitignore b/.gitignore index 31e6aff1ea..c8b95e4482 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ +.idea .pydevproject .project LICENSE_HEADER diff --git a/.travis.yml b/.travis.yml index 7a6a0f80da..3b032f5291 100644 --- a/.travis.yml +++ b/.travis.yml @@ -38,6 +38,8 @@ before_install: - pip --version - pip install --upgrade pip - pip --version + # coveralls doesn't support Python 2.6 anymore, so don't try to install it when testing with Python 2.6 + - if [ "x$TRAVIS_PYTHON_VERSION" != 'x2.6' ]; then pip install coveralls; fi - pip install -r requirements.txt # git config is required to make actual git commits (cfr. tests for GitRepository) - git config --global user.name "Travis CI" @@ -55,14 +57,18 @@ script: - if [ ! -z $MOD_INIT ] && [ ! -z $LMOD_VERSION ]; then alias ml=foobar; fi # set up environment for modules tool (if $MOD_INIT is defined) - if [ ! -z $MOD_INIT ]; then source $MOD_INIT; type module; fi - # install GitHub token + # install GitHub token; + # unset $GITHUB_TOKEN environment variable after installing token, + # to avoid that it is included in environment dump that is included in EasyBuild debug logs, + # which causes test_from_pr_token_log to fail... - if [ ! -z $GITHUB_TOKEN ]; then if [ "x$TRAVIS_PYTHON_VERSION" == 'x2.6' ]; then SET_KEYRING="keyring.set_keyring(keyring.backends.file.PlaintextKeyring())"; else SET_KEYRING="import keyrings; keyring.set_keyring(keyrings.alt.file.PlaintextKeyring())"; fi; python -c "import keyring; $SET_KEYRING; keyring.set_password('github_token', 'easybuild_test', '$GITHUB_TOKEN')"; - fi + fi; + unset GITHUB_TOKEN; - if [ ! -z $TEST_EASYBUILD_MODULES_TOOL ]; then export EASYBUILD_MODULES_TOOL=$TEST_EASYBUILD_MODULES_TOOL; fi - if [ ! 
-z $TEST_EASYBUILD_MODULE_SYNTAX ]; then export EASYBUILD_MODULE_SYNTAX=$TEST_EASYBUILD_MODULE_SYNTAX; fi
   # create 'source distribution' tarball, like we do when publishing a release to PyPI
@@ -88,8 +94,10 @@ script:
   # create file owned by root but writable by anyone (used by test_copy_file)
   - sudo touch /tmp/file_to_overwrite_for_easybuild_test_copy_file.txt
   - sudo chmod o+w /tmp/file_to_overwrite_for_easybuild_test_copy_file.txt
-  # run test suite
-  - python -O -m test.framework.suite 2>&1 | tee test_framework_suite.log
+  # run coverage on all Travis builds except for Python 2.6
+  - if [ "x$TRAVIS_PYTHON_VERSION" != 'x2.6' ]; then coverage run -m test.framework.suite 2>&1 | tee test_framework_suite.log; coverage report -m --ignore-errors; fi
+  # for Python 2.6, run the test suite the original way, without coverage
+  - if [ "x$TRAVIS_PYTHON_VERSION" == 'x2.6' ]; then python -O -m test.framework.suite 2>&1 | tee test_framework_suite.log; fi
   # try and make sure output of running tests is clean (no printed messages/warnings)
   - IGNORE_PATTERNS="no GitHub token available|skipping SvnRepository test|lib/python2.6/site-packages|requires Lmod as modules tool"
   # '|| true' is needed to avoid that Travis stops the job on non-zero exit of grep (i.e. when there are no matches)
@@ -100,12 +108,15 @@ script:
   - EB_BOOTSTRAP_VERSION=$(grep '^EB_BOOTSTRAP_VERSION' $TRAVIS_BUILD_DIR/easybuild/scripts/bootstrap_eb.py | sed 's/[^0-9.]//g')
   - EB_BOOTSTRAP_SHA256SUM=$(sha256sum $TRAVIS_BUILD_DIR/easybuild/scripts/bootstrap_eb.py | cut -f1 -d' ')
   - EB_BOOTSTRAP_FOUND="$EB_BOOTSTRAP_VERSION $EB_BOOTSTRAP_SHA256SUM"
-  - EB_BOOTSTRAP_EXPECTED="20190922.01 7927513e7448d886decfb1bb5daf840e85dc7367f57cc75e51b68f21fe109d53"
+  - EB_BOOTSTRAP_EXPECTED="20200203.01 616bf3ce812c0844bf9ea3e690f9d88b394ed48f834ddb8424a73cf45fc64ea5"
   - test "$EB_BOOTSTRAP_FOUND" = "$EB_BOOTSTRAP_EXPECTED" || (echo "Version check on bootstrap script failed $EB_BOOTSTRAP_FOUND" && exit 1)
-  # test bootstrap script (skip when testing with Python 3 for now, since latest EasyBuild release is not compatible with Python 3 yet)
-  - if [ ! "x$TRAVIS_PYTHON_VERSION" =~ x3.[0-9] ]; then python $TRAVIS_BUILD_DIR/easybuild/scripts/bootstrap_eb.py /tmp/$TRAVIS_JOB_ID/eb_bootstrap; fi
+  # test bootstrap script
+  - python $TRAVIS_BUILD_DIR/easybuild/scripts/bootstrap_eb.py /tmp/$TRAVIS_JOB_ID/eb_bootstrap
   # unset $PYTHONPATH to avoid mixing two EasyBuild 'installations' when testing bootstrapped EasyBuild module
   - unset PYTHONPATH
-  # simply sanity check on bootstrapped EasyBuild module (skip when testing with Python 3, for now)
-  - if [ ! "x$TRAVIS_PYTHON_VERSION" =~ x3.[0-9] ]; then module use /tmp/$TRAVIS_JOB_ID/eb_bootstrap/modules/all; fi
-  - if [ ! "x$TRAVIS_PYTHON_VERSION" =~ x3.[0-9] ]; then module load EasyBuild; eb --version; fi
+  # simple sanity check on bootstrapped EasyBuild module
+  - module use /tmp/$TRAVIS_JOB_ID/eb_bootstrap/modules/all
+  - module load EasyBuild; eb --version
+after_success:
+  - if [ "x$TRAVIS_PYTHON_VERSION" != 'x2.6' ]; then coveralls; fi
+
diff --git a/RELEASE_NOTES b/RELEASE_NOTES
index 86d174b768..8239f714bc 100644
--- a/RELEASE_NOTES
+++ b/RELEASE_NOTES
@@ -3,6 +3,15 @@
 For more detailed information, please see the git log.
 
 These release notes can also be consulted at https://easybuild.readthedocs.io/en/latest/Release_notes.html.
 
+v4.1.2 (March 16th 2020)
+------------------------
+
+bugfix release
+
+- fix gitdb dependency on Python 2.6 in test configuration (#3212)
+- fix broken test for --review-pr by using different PR to test with (#3226)
+- censor authorization part of headers before logging ReST API request (#3248)
+
 v4.1.1 (January 16th 2020)
 --------------------------
diff --git a/contrib/hooks/README.rst b/contrib/hooks/README.rst
new file mode 100644
index 0000000000..626d403ca7
--- /dev/null
+++ b/contrib/hooks/README.rst
@@ -0,0 +1,15 @@
+Example implementations of EasyBuild hooks
+==========================================
+
+.. image:: https://easybuilders.github.io/easybuild/images/easybuild_logo_small.png
+   :align: center
+
+EasyBuild website: https://easybuilders.github.io/easybuild/
+docs: https://easybuild.readthedocs.io
+
+This directory contains examples of implementations of EasyBuild hooks
+used at various sites, along with a couple of small examples with
+explanations.
+
+See https://easybuild.readthedocs.io/en/latest/Hooks.html for
+documentation on hooks in EasyBuild.
diff --git a/contrib/hooks/add_delete_configopt.py b/contrib/hooks/add_delete_configopt.py
new file mode 100644
index 0000000000..b349bcbdee
--- /dev/null
+++ b/contrib/hooks/add_delete_configopt.py
@@ -0,0 +1,32 @@
+# Small example of how to add/delete a configure option.
+#
+# Author: Åke Sandgren, HPC2N
+
+# We need to be able to distinguish between versions of OpenMPI
+from distutils.version import LooseVersion
+
+
+def pre_configure_hook(self, *args, **kwargs):
+    # Check that we're dealing with the correct easyconfig file
+    if self.name == 'OpenMPI':
+        extra_opts = ""
+        # Enable using pmi from slurm
+        extra_opts += "--with-pmi=/lap/slurm "
+
+        # And enable munge for OpenMPI versions that know about it
+        if LooseVersion(self.version) >= LooseVersion('2'):
+            extra_opts += "--with-munge "
+
+        # Now add the options
+        self.log.info("[pre-configure hook] Adding %s" % extra_opts)
+        self.cfg.update('configopts', extra_opts)
+
+        # Now we delete some options
+        # For newer versions of OpenMPI we can re-enable ucx, i.e. delete the --without-ucx flag
+        if LooseVersion(self.version) >= LooseVersion('2.1'):
+            self.log.info("[pre-configure hook] Re-enabling ucx")
+            self.cfg['configopts'] = self.cfg['configopts'].replace('--without-ucx', ' ')
+
+        # And we can remove the --disable-dlopen option from the easyconfig file
+        self.log.info("[pre-configure hook] Re-enabling dlopen")
+        self.cfg['configopts'] = self.cfg['configopts'].replace('--disable-dlopen', ' ')
diff --git a/contrib/hooks/hpc2n_hooks.py b/contrib/hooks/hpc2n_hooks.py
new file mode 100644
index 0000000000..ad93fe88e1
--- /dev/null
+++ b/contrib/hooks/hpc2n_hooks.py
@@ -0,0 +1,213 @@
+# Hooks for HPC2N site changes.
+#
+# Author: Ake Sandgren, HPC2N
+
+import os
+
+from distutils.version import LooseVersion
+from easybuild.framework.easyconfig.format.format import DEPENDENCY_PARAMETERS
+from easybuild.tools.filetools import apply_regex_substitutions
+from easybuild.tools.build_log import EasyBuildError
+from easybuild.tools.modules import get_software_root
+from easybuild.tools.systemtools import get_shared_lib_ext
+
+# Add/remove dependencies and/or patches
+# Access to the raw values before templating and such.
+def parse_hook(ec, *args, **kwargs): + + # Internal helper function + def add_extra_dependencies(ec, dep_type, extra_deps): + """dep_type: must be in DEPENDENCY_PARAMETERS or 'osdependencies'""" + ec.log.info("[parse hook] Adding %s: %s" % (dep_type, extra_deps)) + + if dep_type in DEPENDENCY_PARAMETERS: + for dep in extra_deps: + ec[dep_type].append(dep) + elif dep_type == 'osdependencies': + if isinstance(extra_deps, tuple): + ec[dep_type].append(extra_deps) + else: + raise EasyBuildError("parse_hook: Type of extra_deps argument (%s), for 'osdependencies' must be " + "tuple, found %s" % (extra_deps, type(extra_deps))) + else: + raise EasyBuildError("parse_hook: Incorrect dependency type in add_extra_dependencies: %s" % dep_type) + + extra_deps = [] + + if ec.name == 'OpenMPI': + if LooseVersion(ec.version) >= LooseVersion('2') and LooseVersion(ec.version) < LooseVersion('2.1.2'): + ec.log.info("[parse hook] Adding pmi and lustre patches") + if LooseVersion(ec.version) < LooseVersion('2.1.1'): + ec['patches'].append('OpenMPI-2.0.0_fix_bad-include_of_pmi_h.patch') + + if LooseVersion(ec.version) < LooseVersion('2.0.2'): + ec['patches'].append('OpenMPI-2.0.1_fix_lustre.patch') + elif LooseVersion(ec.version) < LooseVersion('2.1'): + ec['patches'].append('OpenMPI-2.0.2_fix_lustre.patch') + elif LooseVersion(ec.version) < LooseVersion('2.1.1'): + ec['patches'].append('OpenMPI-2.1.0_fix_lustre.patch') + else: + ec['patches'].append('OpenMPI-2.1.1_fix_lustre.patch') + + if LooseVersion(ec.version) == LooseVersion('4.0.0'): + ec['patches'].append('OpenMPI-4.0.0_fix_configure_bug.patch') + + if LooseVersion(ec.version) >= LooseVersion('2.1'): + pmix_version = '1.2.5' + ucx_version = '1.4.0' + if LooseVersion(ec.version) >= LooseVersion('3'): + pmix_version = '2.2.1' + if LooseVersion(ec.version) >= LooseVersion('4'): + pmix_version = '3.0.2' # OpenMPI 4.0.0 is not compatible with PMIx 3.1.x + + extra_deps.append(('PMIx', pmix_version)) + # Use of external PMIx requires external libevent + # But PMIx already has it as a dependency so we don't need + # to explicitly set it. + + extra_deps.append(('UCX', ucx_version)) + + if ec.name == 'impi': + pmix_version = '3.1.1' + extra_deps.append(('PMIx', pmix_version)) + + if extra_deps: + add_extra_dependencies(ec, 'dependencies', extra_deps) + + +def pre_configure_hook(self, *args, **kwargs): + if self.name == 'GROMACS': + # HPC2N always uses -DGMX_USE_NVML=ON on GPU builds + if get_software_root('CUDA'): + self.log.info("[pre-configure hook] Adding -DGMX_USE_NVML=ON") + self.cfg.update('configopts', "-DGMX_USE_NVML=ON ") + + if self.name == 'OpenMPI': + extra_opts = "" + # Old versions don't work with PMIx, use slurms PMI1 + if LooseVersion(self.version) < LooseVersion('2.1'): + extra_opts += "--with-pmi=/lap/slurm " + if LooseVersion(self.version) >= LooseVersion('2'): + extra_opts += "--with-munge " + + # Using PMIx dependency in easyconfig, see above + if LooseVersion(self.version) >= LooseVersion('2.1'): + if get_software_root('PMIx'): + extra_opts += "--with-pmix=$EBROOTPMIX " + # Use of external PMIx requires external libevent + # We're using the libevent that comes from the PMIx dependency + if get_software_root('libevent'): + extra_opts += "--with-libevent=$EBROOTLIBEVENT " + else: + raise EasyBuildError("Error in pre_configure_hook for OpenMPI: External use of PMIx requires " + "external libevent, which was not found. 
" + "Check parse_hook for dependency settings.") + else: + raise EasyBuildError("Error in pre_configure_hook for OpenMPI: PMIx not defined in dependencies. " + "Check parse_hook for dependency settings.") + + if get_software_root('UCX'): + extra_opts += "--with-ucx=$EBROOTUCX " + + if LooseVersion(self.version) >= LooseVersion('2'): + extra_opts += "--with-cma " + extra_opts += "--with-lustre " + + # We still need to fix the knem package to install its + # pkg-config .pc file correctly, and we need a more generic + # install dir. + # extra_opts += "--with-knem=/opt/knem-1.1.2.90mlnx1 " + + self.log.info("[pre-configure hook] Adding %s" % extra_opts) + self.cfg.update('configopts', extra_opts) + + if LooseVersion(self.version) >= LooseVersion('2.1'): + self.log.info("[pre-configure hook] Re-enabling ucx") + self.cfg['configopts'] = self.cfg['configopts'].replace('--without-ucx', ' ') + + self.log.info("[pre-configure hook] Re-enabling dlopen") + self.cfg['configopts'] = self.cfg['configopts'].replace('--disable-dlopen', ' ') + + if self.name == 'PMIx': + self.log.info("[pre-configure hook] Adding --with-munge") + self.cfg.update('configopts', "--with-munge ") + if LooseVersion(self.version) >= LooseVersion('2'): + self.log.info("[pre-configure hook] Adding --with-tests-examples") + self.cfg.update('configopts', "--with-tests-examples ") + self.log.info("[pre-configure hook] Adding --disable-per-user-config-files") + self.cfg.update('configopts', "--disable-per-user-config-files") + + +def pre_build_hook(self, *args, **kwargs): + if self.name == 'pyslurm': + self.log.info("[pre-build hook] Adding --slurm=/lap/slurm") + self.cfg.update('buildopts', "--slurm=/lap/slurm ") + + +def post_install_hook(self, *args, **kwargs): + if self.name == 'impi': + # Fix mpirun from IntelMPI to explicitly unset I_MPI_PMI_LIBRARY + # it can only be used with srun. + self.log.info("[post-install hook] Unset I_MPI_PMI_LIBRARY in mpirun") + apply_regex_substitutions(os.path.join(self.installdir, "intel64", "bin", "mpirun"), [ + (r'^(#!/bin/sh.*)$', r'\1\nunset I_MPI_PMI_LIBRARY'), + ]) + + +def pre_module_hook(self, *args, **kwargs): + if self.name == 'impi': + # Add I_MPI_PMI_LIBRARY to module for IntelMPI so it works with + # srun. + self.log.info("[pre-module hook] Set I_MPI_PMI_LIBRARY in impi module") + # Must be done this way, updating self.cfg['modextravars'] + # directly doesn't work due to templating. + en_templ = self.cfg.enable_templating + self.cfg.enable_templating = False + shlib_ext = get_shared_lib_ext() + pmix_root = get_software_root('PMIx') + if pmix_root: + mpi_type = 'pmix_v3' + self.cfg['modextravars'].update({ + 'I_MPI_PMI_LIBRARY': os.path.join(pmix_root, "lib", "libpmi." + shlib_ext) + }) + self.cfg['modextravars'].update({'SLURM_MPI_TYPE': mpi_type}) + # Unfortunately UCX doesn't yet work for unknown reasons. Make sure it is off. 
+ self.cfg['modextravars'].update({'SLURM_PMIX_DIRECT_CONN_UCX': 'false'}) + else: + self.cfg['modextravars'].update({'I_MPI_PMI_LIBRARY': "/lap/slurm/lib/libpmi.so"}) + self.cfg.enable_templating = en_templ + + if self.name == 'OpenBLAS': + self.log.info("[pre-module hook] Set OMP_NUM_THREADS=1 in OpenBLAS module") + self.cfg.update('modluafooter', 'if ((mode() == "load" and os.getenv("OMP_NUM_THREADS") == nil) ' + 'or (mode() == "unload" and os.getenv("__OpenBLAS_set_OMP_NUM_THREADS") == "1")) then ' + 'setenv("OMP_NUM_THREADS","1"); setenv("__OpenBLAS_set_OMP_NUM_THREADS", "1") end') + + if self.name == 'OpenMPI': + if LooseVersion(self.version) < LooseVersion('2.1'): + mpi_type = 'openmpi' + elif LooseVersion(self.version) < LooseVersion('3'): + mpi_type = 'pmix_v1' + elif LooseVersion(self.version) < LooseVersion('4'): + mpi_type = 'pmix_v2' + else: + mpi_type = 'pmix_v3' + + self.log.info("[pre-module hook] Set SLURM_MPI_TYPE=%s in OpenMPI module" % mpi_type) + # Must be done this way, updating self.cfg['modextravars'] + # directly doesn't work due to templating. + en_templ = self.cfg.enable_templating + self.cfg.enable_templating = False + self.cfg['modextravars'].update({'SLURM_MPI_TYPE': mpi_type}) + # Unfortunately UCX doesn't yet work for unknown reasons. Make sure it is off. + self.cfg['modextravars'].update({'SLURM_PMIX_DIRECT_CONN_UCX': 'false'}) + self.cfg.enable_templating = en_templ + + if self.name == 'PMIx': + # This is a, hopefully, temporary workaround for https://github.com/pmix/pmix/issues/1114 + if LooseVersion(self.version) > LooseVersion('2') and LooseVersion(self.version) < LooseVersion('3'): + self.log.info("[pre-module hook] Set PMIX_MCA_gds=^ds21 in PMIx module") + en_templ = self.cfg.enable_templating + self.cfg.enable_templating = False + self.cfg['modextravars'].update({'PMIX_MCA_gds': '^ds21'}) + self.cfg.enable_templating = en_templ diff --git a/easybuild/base/rest.py b/easybuild/base/rest.py index 0aa0b08fcf..842c0bd3c8 100644 --- a/easybuild/base/rest.py +++ b/easybuild/base/rest.py @@ -35,6 +35,7 @@ :author: Jens Timmerman """ import base64 +import copy import json from functools import partial @@ -162,7 +163,13 @@ def request(self, method, url, body, headers, content_type=None): if self.auth_header is not None: headers['Authorization'] = self.auth_header headers['User-Agent'] = self.user_agent - fancylogger.getLogger().debug('cli request: %s, %s, %s, %s', method, url, body, headers) + + # censor contents of 'Authorization' part of header, to avoid leaking tokens or passwords in logs + headers_censored = copy.deepcopy(headers) + headers_censored['Authorization'] = '' + + fancylogger.getLogger().debug('cli request: %s, %s, %s, %s', method, url, body, headers_censored) + # TODO: in recent python: Context manager conn = self.get_connection(method, url, body, headers) status = conn.code diff --git a/easybuild/framework/easyblock.py b/easybuild/framework/easyblock.py index 300f474217..9f79d9b10b 100644 --- a/easybuild/framework/easyblock.py +++ b/easybuild/framework/easyblock.py @@ -62,7 +62,7 @@ from easybuild.framework.easyconfig.tools import get_paths_for from easybuild.framework.easyconfig.templates import TEMPLATE_NAMES_EASYBLOCK_RUN_STEP, template_constant_dict from easybuild.framework.extension import resolve_exts_filter_template -from easybuild.tools import config, filetools +from easybuild.tools import config, run from easybuild.tools.build_details import get_build_stats from easybuild.tools.build_log import EasyBuildError, dry_run_msg, 
dry_run_warning, dry_run_set_dirs from easybuild.tools.build_log import print_error, print_msg, print_warning @@ -75,8 +75,8 @@ from easybuild.tools.filetools import change_dir, convert_name, compute_checksum, copy_file, derive_alt_pypi_url from easybuild.tools.filetools import diff_files, download_file, encode_class_name, extract_file from easybuild.tools.filetools import find_backup_name_candidate, get_source_tarball_from_git, is_alt_pypi_url -from easybuild.tools.filetools import is_sha256_checksum, mkdir, move_file, move_logs, read_file, remove_dir -from easybuild.tools.filetools import remove_file, rmtree2, verify_checksum, weld_paths, write_file, dir_contains_files +from easybuild.tools.filetools import is_binary, is_sha256_checksum, mkdir, move_file, move_logs, read_file, remove_dir +from easybuild.tools.filetools import remove_file, verify_checksum, weld_paths, write_file, dir_contains_files from easybuild.tools.hooks import BUILD_STEP, CLEANUP_STEP, CONFIGURE_STEP, EXTENSIONS_STEP, FETCH_STEP, INSTALL_STEP from easybuild.tools.hooks import MODULE_STEP, PACKAGE_STEP, PATCH_STEP, PERMISSIONS_STEP, POSTITER_STEP, POSTPROC_STEP from easybuild.tools.hooks import PREPARE_STEP, READY_STEP, SANITYCHECK_STEP, SOURCE_STEP, TEST_STEP, TESTCASES_STEP @@ -1282,47 +1282,69 @@ def make_module_req(self): lines = ['\n'] if os.path.isdir(self.installdir): - change_dir(self.installdir) + old_dir = change_dir(self.installdir) + else: + old_dir = None + if self.dry_run: + self.dry_run_msg("List of paths that would be searched and added to module file:\n") + note = "note: glob patterns are not expanded and existence checks " + note += "for paths are skipped for the statements below due to dry run" + lines.append(self.module_generator.comment(note)) + + # for these environment variables, the corresponding subdirectory must include at least one file + keys_requiring_files = set(('PATH', 'LD_LIBRARY_PATH', 'LIBRARY_PATH', 'CPATH', + 'CMAKE_PREFIX_PATH', 'CMAKE_LIBRARY_PATH')) + + for key, reqs in sorted(requirements.items()): + if isinstance(reqs, string_type): + self.log.warning("Hoisting string value %s into a list before iterating over it", reqs) + reqs = [reqs] if self.dry_run: - self.dry_run_msg("List of paths that would be searched and added to module file:\n") - note = "note: glob patterns are not expanded and existence checks " - note += "for paths are skipped for the statements below due to dry run" - lines.append(self.module_generator.comment(note)) - - # for these environment variables, the corresponding subdirectory must include at least one file - keys_requiring_files = ('CPATH', 'LD_LIBRARY_PATH', 'LIBRARY_PATH', 'PATH') - - for key in sorted(requirements): - if self.dry_run: - self.dry_run_msg(" $%s: %s" % (key, ', '.join(requirements[key]))) - reqs = requirements[key] - if isinstance(reqs, string_type): - self.log.warning("Hoisting string value %s into a list before iterating over it", reqs) - reqs = [reqs] - - for path in reqs: - # only use glob if the string is non-empty - if path and not self.dry_run: - paths = sorted(glob.glob(path)) - if paths and key in keys_requiring_files: - # only retain paths that contain at least one file - retained_paths = [ - path for path in paths - if os.path.isdir(os.path.join(self.installdir, path)) - and dir_contains_files(os.path.join(self.installdir, path)) - ] - self.log.info("Only retaining paths for %s that contain at least one file: %s -> %s", - key, paths, retained_paths) - paths = retained_paths - else: - # empty string is a valid value here (i.e. 
to prepend the installation prefix, cfr $CUDA_HOME) - paths = [path] + self.dry_run_msg(" $%s: %s" % (key, ', '.join(reqs))) + # Don't expand globs or do any filtering below for dry run + paths = sorted(reqs) + else: + # Expand globs but only if the string is non-empty + # empty string is a valid value here (i.e. to prepend the installation prefix, cfr $CUDA_HOME) + paths = sorted(sum((glob.glob(path) if path else [path] for path in reqs), [])) # sum flattens to list + + # If lib64 is just a symlink to lib we fixup the paths to avoid duplicates + lib64_is_symlink = (all(os.path.isdir(path) for path in ['lib', 'lib64']) + and os.path.samefile('lib', 'lib64')) + if lib64_is_symlink: + fixed_paths = [] + for path in paths: + if (path + os.path.sep).startswith('lib64' + os.path.sep): + # We only need CMAKE_LIBRARY_PATH if there is a separate lib64 path, so skip symlink + if key == 'CMAKE_LIBRARY_PATH': + continue + path = path.replace('lib64', 'lib', 1) + fixed_paths.append(path) + if fixed_paths != paths: + self.log.info("Fixed symlink lib64 in paths for %s: %s -> %s", key, paths, fixed_paths) + paths = fixed_paths + # Use a set to remove duplicates, e.g. by having lib64 and lib which get fixed to lib and lib above + paths = sorted(set(paths)) + if key in keys_requiring_files: + # only retain paths that contain at least one file + retained_paths = [ + path for path in paths + if os.path.isdir(os.path.join(self.installdir, path)) + and dir_contains_files(os.path.join(self.installdir, path)) + ] + if retained_paths != paths: + self.log.info("Only retaining paths for %s that contain at least one file: %s -> %s", + key, paths, retained_paths) + paths = retained_paths + + if paths: + lines.append(self.module_generator.prepend_paths(key, paths)) + if self.dry_run: + self.dry_run_msg('') - lines.append(self.module_generator.prepend_paths(key, paths)) - if self.dry_run: - self.dry_run_msg('') - change_dir(self.orig_workdir) + if old_dir is not None: + change_dir(old_dir) return ''.join(lines) @@ -1342,6 +1364,8 @@ def make_module_req_guess(self): 'CLASSPATH': ['*.jar'], 'XDG_DATA_DIRS': ['share'], 'GI_TYPELIB_PATH': [os.path.join(x, 'girepository-*') for x in lib_paths], + 'CMAKE_PREFIX_PATH': [''], + 'CMAKE_LIBRARY_PATH': ['lib64'], # lib and lib32 are searched through the above } def load_module(self, mod_paths=None, purge=True, extra_modules=None): @@ -1413,7 +1437,7 @@ def clean_up_fake_module(self, fake_mod_data): try: self.modules_tool.unload([self.short_mod_name]) self.modules_tool.remove_module_path(os.path.join(fake_mod_path, self.mod_subdir)) - rmtree2(os.path.dirname(fake_mod_path)) + remove_dir(os.path.dirname(fake_mod_path)) except OSError as err: raise EasyBuildError("Failed to clean up fake module dir %s: %s", fake_mod_path, err) elif self.short_mod_name is None: @@ -1439,7 +1463,7 @@ def prepare_for_extensions(self): def skip_extensions(self): """ Called when self.skip is True - - use this to detect existing extensions and to remove them from self.exts + - use this to detect existing extensions and to remove them from self.ext_instances - based on initial R version """ # obtaining untemplated reference value is required here to support legacy string templates like name/version @@ -1449,17 +1473,18 @@ def skip_extensions(self): raise EasyBuildError("Skipping of extensions, but no exts_filter set in easyconfig") res = [] - for ext in self.exts: - cmd, stdin = resolve_exts_filter_template(exts_filter, ext) + for ext_inst in self.ext_instances: + cmd, stdin = 
resolve_exts_filter_template(exts_filter, ext_inst) (cmdstdouterr, ec) = run_cmd(cmd, log_all=False, log_ok=False, simple=False, inp=stdin, regexp=False) self.log.info("exts_filter result %s %s", cmdstdouterr, ec) if ec: - self.log.info("Not skipping %s" % ext['name']) - self.log.debug("exit code: %s, stdout/err: %s" % (ec, cmdstdouterr)) - res.append(ext) + self.log.info("Not skipping %s", ext_inst.name) + self.log.debug("exit code: %s, stdout/err: %s", ec, cmdstdouterr) + res.append(ext_inst) else: - self.log.info("Skipping %s" % ext['name']) - self.exts = res + print_msg("skipping extension %s" % ext_inst.name, silent=self.silent, log=self.log) + + self.ext_instances = res # # MISCELLANEOUS UTILITY FUNCTIONS @@ -1844,9 +1869,14 @@ def check_checksums_for(self, ent, sub='', source_cnt=None): else: valid_checksums = (checksum,) - if not all(is_sha256_checksum(c) for c in valid_checksums): - msg = "Non-SHA256 checksum(s) found for %s: %s" % (fn, valid_checksums) - checksum_issues.append(msg) + non_sha256_checksums = [c for c in valid_checksums if not is_sha256_checksum(c)] + if non_sha256_checksums: + if all(c is None for c in non_sha256_checksums): + print_warning("Found %d None checksum value(s), please make sure this is intended!" % + len(non_sha256_checksums)) + else: + msg = "Non-SHA256 checksum(s) found for %s: %s" % (fn, valid_checksums) + checksum_issues.append(msg) return checksum_issues @@ -2053,9 +2083,6 @@ def extensions_step(self, fetch=False): self.exts_all = self.exts[:] # retain a copy of all extensions, regardless of filtering/skipping - if self.skip: - self.skip_extensions() - # actually install extensions self.log.debug("Installing extensions") exts_defaultclass = self.cfg['exts_defaultclass'] @@ -2076,14 +2103,8 @@ def extensions_step(self, fetch=False): # get class instances for all extensions self.ext_instances = [] - exts_cnt = len(self.exts) - for idx, ext in enumerate(self.exts): - self.log.debug("Starting extension %s" % ext['name']) - tup = (ext['name'], ext.get('version', ''), idx+1, exts_cnt) - print_msg("installing extension %s %s (%d/%d)..." 
% tup, silent=self.silent) - - # always go back to original work dir to avoid running stuff from a dir that no longer exists - change_dir(self.orig_workdir) + for ext in self.exts: + self.log.debug("Creating class instance for extension %s...", ext['name']) cls, inst = None, None class_name = encode_class_name(ext['name']) @@ -2095,11 +2116,11 @@ def extensions_step(self, fetch=False): # with a similar name (e.g., Perl Extension 'GO' vs 'Go' for which 'EB_Go' is available) cls = get_easyblock_class(None, name=ext['name'], error_on_failed_import=False, error_on_missing_easyblock=False) - self.log.debug("Obtained class %s for extension %s" % (cls, ext['name'])) + self.log.debug("Obtained class %s for extension %s", cls, ext['name']) if cls is not None: inst = cls(self, ext) except (ImportError, NameError) as err: - self.log.debug("Failed to use extension-specific class for extension %s: %s" % (ext['name'], err)) + self.log.debug("Failed to use extension-specific class for extension %s: %s", ext['name'], err) # alternative attempt: use class specified in class map (if any) if inst is None and ext['name'] in exts_classmap: @@ -2117,7 +2138,7 @@ def extensions_step(self, fetch=False): if inst is None: try: cls = get_class_for(default_class_modpath, default_class) - self.log.debug("Obtained class %s for installing extension %s" % (cls, ext['name'])) + self.log.debug("Obtained class %s for installing extension %s", cls, ext['name']) inst = cls(self, ext) self.log.debug("Installing extension %s with default class %s (from %s)", ext['name'], default_class, default_class_modpath) @@ -2125,10 +2146,26 @@ def extensions_step(self, fetch=False): raise EasyBuildError("Also failed to use default class %s from %s for extension %s: %s, giving up", default_class, default_class_modpath, ext['name'], err) else: - self.log.debug("Installing extension %s with class %s (from %s)" % (ext['name'], class_name, mod_path)) + self.log.debug("Installing extension %s with class %s (from %s)", ext['name'], class_name, mod_path) + + self.ext_instances.append(inst) + + if self.skip: + self.skip_extensions() + + exts_cnt = len(self.ext_instances) + for idx, ext in enumerate(self.ext_instances): + + self.log.debug("Starting extension %s" % ext.name) + + # always go back to original work dir to avoid running stuff from a dir that no longer exists + change_dir(self.orig_workdir) + + tup = (ext.name, ext.version or '', idx+1, exts_cnt) + print_msg("installing extension %s %s (%d/%d)..." 
% tup, silent=self.silent) if self.dry_run: - tup = (ext['name'], ext.get('version', ''), cls.__name__) + tup = (ext.name, ext.version, cls.__name__) msg = "\n* installing extension %s %s using '%s' easyblock\n" % tup self.dry_run_msg(msg) @@ -2141,18 +2178,15 @@ def extensions_step(self, fetch=False): else: # don't reload modules for toolchain, there is no need since they will be loaded already; # the (fake) module for the parent software gets loaded before installing extensions - inst.toolchain.prepare(onlymod=self.cfg['onlytcmod'], silent=True, loadmod=False, - rpath_filter_dirs=self.rpath_filter_dirs) + ext.toolchain.prepare(onlymod=self.cfg['onlytcmod'], silent=True, loadmod=False, + rpath_filter_dirs=self.rpath_filter_dirs) # real work - inst.prerun() - txt = inst.run() + ext.prerun() + txt = ext.run() if txt: self.module_extra_extensions += txt - inst.postrun() - - # append so we can make us of it later (in sanity_check_step) - self.ext_instances.append(inst) + ext.postrun() # cleanup (unload fake module, remove fake module dir) if fake_mod_data: @@ -2199,17 +2233,26 @@ def fix_shebang(self): lang, shebang, glob_pattern, paths) for path in paths: # check whether file should be patched by checking whether it has a shebang we want to tweak; - # this also helps to skip binary files we may be hitting + # this also helps to skip binary files we may be hitting (but only with Python 3) try: contents = read_file(path, mode='r') should_patch = shebang_regex.match(contents) except (TypeError, UnicodeDecodeError): should_patch = False + contents = None + # if an existing shebang is found, patch it if should_patch: contents = shebang_regex.sub(shebang, contents) write_file(path, contents) + # if no shebang is present at all, add one (but only for non-binary files!) + elif contents is not None and not is_binary(contents) and not contents.startswith('#!'): + self.log.info("The file '%s' doesn't have any shebang present, inserting it as first line.", + path) + contents = shebang + '\n' + contents + write_file(path, contents) + def post_install_step(self): """ Do some postprocessing @@ -2628,7 +2671,7 @@ def cleanup_step(self): self.log.info("Cleaning up builddir %s (in %s)", self.builddir, os.getcwd()) try: - rmtree2(self.builddir) + remove_dir(self.builddir) base = os.path.dirname(self.builddir) # keep removing empty directories until we either find a non-empty one @@ -3000,6 +3043,37 @@ def run_all_steps(self, run_test_cases): print_msg("building and installing %s..." % self.full_mod_name, log=self.log, silent=self.silent) trace_msg("installation prefix: %s" % self.installdir) + + ignore_locks = build_option('ignore_locks') + + if ignore_locks: + self.log.info("Ignoring locks...") + else: + locks_dir = build_option('locks_dir') or os.path.join(install_path('software'), '.locks') + lock_path = os.path.join(locks_dir, '%s.lock' % self.installdir.replace('/', '_')) + + # if lock already exists, either abort or wait until it disappears + if os.path.exists(lock_path): + wait_on_lock = build_option('wait_on_lock') + if wait_on_lock: + while os.path.exists(lock_path): + print_msg("lock %s exists, waiting %d seconds..." 
% (lock_path, wait_on_lock), + silent=self.silent) + time.sleep(wait_on_lock) + else: + raise EasyBuildError("Lock %s already exists, aborting!", lock_path) + + # create lock to avoid that another installation running in parallel messes things up; + # we use a directory as a lock, since that's atomically created + try: + mkdir(lock_path, parents=True) + except EasyBuildError as err: + # clean up the error message a bit, get rid of the "Failed to create directory" part + quotes + stripped_err = str(err).split(':', 1)[1].strip().replace("'", '').replace('"', '') + raise EasyBuildError("Failed to create lock %s: %s", lock_path, stripped_err) + + self.log.info("Lock created: %s", lock_path) + try: for (step_name, descr, step_methods, skippable) in steps: if self._skip_step(step_name, skippable): @@ -3014,6 +3088,10 @@ def run_all_steps(self, run_test_cases): except StopException: pass + finally: + if not ignore_locks: + remove_dir(lock_path) + self.log.info("Lock removed: %s", lock_path) # return True for successfull build (or stopped build) return True @@ -3059,7 +3137,7 @@ def build_and_install_one(ecdict, init_env): # restore original environment, and then sanitize it _log.info("Resetting environment") - filetools.errors_found_in_log = 0 + run.errors_found_in_log = 0 restore_env(init_env) sanitize_env() @@ -3210,10 +3288,9 @@ def build_and_install_one(ecdict, init_env): print_msg("%s: Installation %s %s (took %s)" % (summary, ended, succ, req_time), log=_log, silent=silent) # check for errors - if filetools.errors_found_in_log > 0: - print_msg("WARNING: %d possible error(s) were detected in the " - "build logs, please verify the build." % filetools.errors_found_in_log, - _log, silent=silent) + if run.errors_found_in_log > 0: + _log.warning("%d possible error(s) were detected in the " + "build logs, please verify the build.", run.errors_found_in_log) if app.postmsg: print_msg("\nWARNING: %s\n" % app.postmsg, log=_log, silent=silent) diff --git a/easybuild/framework/easyconfig/easyconfig.py b/easybuild/framework/easyconfig/easyconfig.py index 8116dec65b..e836239169 100644 --- a/easybuild/framework/easyconfig/easyconfig.py +++ b/easybuild/framework/easyconfig/easyconfig.py @@ -46,6 +46,7 @@ import re from distutils.version import LooseVersion +import easybuild.tools.filetools as filetools from easybuild.base import fancylogger from easybuild.framework.easyconfig import MANDATORY from easybuild.framework.easyconfig.constants import EXTERNAL_MODULE_MARKER @@ -59,10 +60,12 @@ from easybuild.framework.easyconfig.parser import EasyConfigParser, fetch_parameters_from_easyconfig from easybuild.framework.easyconfig.templates import TEMPLATE_CONSTANTS, template_constant_dict from easybuild.tools.build_log import EasyBuildError, print_warning, print_msg -from easybuild.tools.config import LOCAL_VAR_NAMING_CHECK_ERROR, LOCAL_VAR_NAMING_CHECK_LOG, LOCAL_VAR_NAMING_CHECK_WARN +from easybuild.tools.config import GENERIC_EASYBLOCK_PKG, LOCAL_VAR_NAMING_CHECK_ERROR, LOCAL_VAR_NAMING_CHECK_LOG +from easybuild.tools.config import LOCAL_VAR_NAMING_CHECK_WARN from easybuild.tools.config import Singleton, build_option, get_module_naming_scheme -from easybuild.tools.filetools import EASYBLOCK_CLASS_PREFIX, copy_file, decode_class_name, encode_class_name -from easybuild.tools.filetools import convert_name, find_backup_name_candidate, find_easyconfigs, read_file, write_file +from easybuild.tools.filetools import convert_name, copy_file, create_index, decode_class_name, encode_class_name +from 
easybuild.tools.filetools import find_backup_name_candidate, find_easyconfigs, load_index +from easybuild.tools.filetools import read_file, write_file from easybuild.tools.hooks import PARSE, load_hooks, run_hook from easybuild.tools.module_naming_scheme.mns import DEVEL_MODULE_SUFFIX from easybuild.tools.module_naming_scheme.utilities import avail_module_naming_schemes, det_full_ec_version @@ -103,6 +106,7 @@ _easyconfig_files_cache = {} _easyconfigs_cache = {} +_path_indexes = {} def handle_deprecated_or_replaced_easyconfig_parameters(ec_method): @@ -554,21 +558,30 @@ def update(self, key, value, allow_duplicate=True): """ Update a string configuration value with a value (i.e. append to it). """ - prev_value = self[key] - if isinstance(prev_value, string_type): - if allow_duplicate or value not in prev_value: - self[key] = '%s %s ' % (prev_value, value) - elif isinstance(prev_value, list): - if allow_duplicate: - self[key] = prev_value + value - else: - for item in value: - # add only those items that aren't already in the list - if item not in prev_value: - self[key] = prev_value + [item] + if isinstance(value, string_type): + lval = [value] + elif isinstance(value, list): + lval = value + else: + msg = "Can't update configuration value for %s, because the " + msg += "attempted update value, '%s', is not a string or list." + raise EasyBuildError(msg, key, value) + + param_value = self[key] + if isinstance(param_value, string_type): + for item in lval: + # re.search: only add value to string if it's not there yet (surrounded by whitespace) + if allow_duplicate or (not re.search(r'(^|\s+)%s(\s+|$)' % re.escape(item), param_value)): + param_value = param_value + ' %s ' % item + elif isinstance(param_value, list): + for item in lval: + if allow_duplicate or item not in param_value: + param_value = param_value + [item] else: raise EasyBuildError("Can't update configuration value for %s, because it's not a string or list.", key) + self[key] = param_value + def set_keys(self, params): """ Set keys in this EasyConfig instance based on supplied easyconfig parameter values. @@ -1543,17 +1556,32 @@ def generate_template_values(self): def _generate_template_values(self, ignore=None): """Actual code to generate the template values""" - if self.template_values is None: - self.template_values = {} # step 0. self.template_values can/should be updated from outside easyconfig - # (eg the run_setp code in EasyBlock) + # (eg the run_step code in EasyBlock) # step 1-3 work with easyconfig.templates constants # disable templating with creating dict with template values to avoid looping back to here via __getitem__ prev_enable_templating = self.enable_templating + + self.enable_templating = False + + if self.template_values is None: + # if no template values are set yet, initiate with a minimal set of template values; + # this is important for easyconfig that use %(version_minor)s to define 'toolchain', + # which is a pretty weird use case, but fine... 
+ self.template_values = template_constant_dict(self, ignore=ignore) + + self.enable_templating = prev_enable_templating + + # grab toolchain instance with templating support enabled, + # which is important in case the Toolchain instance was not created yet + toolchain = self.toolchain + + # get updated set of template values, now with toolchain instance + # (which is used to define the %(mpi_cmd_prefix)s template) self.enable_templating = False - template_values = template_constant_dict(self, ignore=ignore) + template_values = template_constant_dict(self, ignore=ignore, toolchain=toolchain) self.enable_templating = prev_enable_templating # update the template_values dict @@ -1765,8 +1793,8 @@ def get_easyblock_class(easyblock, name=None, error_on_failed_import=True, error def is_generic_easyblock(easyblock): """Return whether specified easyblock name is a generic easyblock or not.""" - - return easyblock and not easyblock.startswith(EASYBLOCK_CLASS_PREFIX) + _log.deprecated("is_generic_easyblock function was moved to easybuild.tools.filetools", '5.0') + return filetools.is_generic_easyblock(easyblock) def get_module_path(name, generic=None, decode=True): @@ -1781,7 +1809,7 @@ def get_module_path(name, generic=None, decode=True): return None if generic is None: - generic = is_generic_easyblock(name) + generic = filetools.is_generic_easyblock(name) # example: 'EB_VSC_minus_tools' should result in 'vsc_tools' if decode: @@ -1790,7 +1818,7 @@ def get_module_path(name, generic=None, decode=True): modpath = ['easybuild', 'easyblocks'] if generic: - modpath.append('generic') + modpath.append(GENERIC_EASYBLOCK_PKG) return '.'.join(modpath + [module_name]) @@ -1983,10 +2011,29 @@ def robot_find_easyconfig(name, version): res = None for path in paths: + + if build_option('ignore_index'): + _log.info("Ignoring index for %s...", path) + path_index = [] + elif path in _path_indexes: + path_index = _path_indexes[path] + _log.info("Found loaded index for %s", path) + elif os.path.exists(path): + path_index = load_index(path) + if path_index is None: + _log.info("No index found for %s, so creating it...", path) + path_index = create_index(path) + else: + _log.info("Loaded index for %s", path) + + _path_indexes[path] = path_index + else: + path_index = [] + easyconfigs_paths = create_paths(path, name, version) for easyconfig_path in easyconfigs_paths: _log.debug("Checking easyconfig path %s" % easyconfig_path) - if os.path.isfile(easyconfig_path): + if easyconfig_path in path_index or os.path.isfile(easyconfig_path): _log.debug("Found easyconfig file for name %s, version %s at %s" % (name, version, easyconfig_path)) _easyconfig_files_cache[key] = os.path.abspath(easyconfig_path) res = _easyconfig_files_cache[key] diff --git a/easybuild/framework/easyconfig/templates.py b/easybuild/framework/easyconfig/templates.py index f024866fd7..b6e6fe1393 100644 --- a/easybuild/framework/easyconfig/templates.py +++ b/easybuild/framework/easyconfig/templates.py @@ -145,7 +145,7 @@ # versionmajor, versionminor, versionmajorminor (eg '.'.join(version.split('.')[:2])) ) -def template_constant_dict(config, ignore=None, skip_lower=None): +def template_constant_dict(config, ignore=None, skip_lower=None, toolchain=None): """Create a dict for templating the values in the easyconfigs. 
         - config is a dict with the structure of EasyConfig._config
     """
@@ -257,6 +257,17 @@
         except Exception:
             _log.warning("Failed to get .lower() for name %s value %s (type %s)", name, value, type(value))
 
+    # step 5. add additional conditional templates
+    if toolchain is not None and hasattr(toolchain, 'mpi_cmd_prefix'):
+        try:
+            # get prefix for commands to be run with mpi runtime using default number of ranks
+            mpi_cmd_prefix = toolchain.mpi_cmd_prefix()
+            if mpi_cmd_prefix is not None:
+                template_values['mpi_cmd_prefix'] = mpi_cmd_prefix
+        except EasyBuildError as err:
+            # don't fail just because we couldn't resolve this template
+            _log.warning("Failed to create mpi_cmd_prefix template, error was:\n%s", err)
+
     return template_values
diff --git a/easybuild/framework/easyconfig/tools.py b/easybuild/framework/easyconfig/tools.py
index 7d717d6258..2a3260ae80 100644
--- a/easybuild/framework/easyconfig/tools.py
+++ b/easybuild/framework/easyconfig/tools.py
@@ -90,7 +90,7 @@ def skip_available(easyconfigs, modtool):
     """Skip building easyconfigs for existing modules."""
     module_names = [ec['full_mod_name'] for ec in easyconfigs]
-    modules_exist = modtool.exist(module_names)
+    modules_exist = modtool.exist(module_names, maybe_partial=False)
     retained_easyconfigs = []
     for ec, mod_name, mod_exists in zip(easyconfigs, module_names, modules_exist):
         if mod_exists:
@@ -604,17 +604,21 @@ def dump_env_script(easyconfigs):
 
 def categorize_files_by_type(paths):
     """
-    Splits list of filepaths into a 3 separate lists: easyconfigs, files to delete and patch files
+    Splits list of filepaths into 4 separate lists: easyconfigs, files to delete, patch files and
+    files with extension .py
     """
     res = {
         'easyconfigs': [],
         'files_to_delete': [],
         'patch_files': [],
+        'py_files': [],
     }
 
     for path in paths:
         if path.startswith(':'):
             res['files_to_delete'].append(path[1:])
+        elif path.endswith('.py'):
+            res['py_files'].append(path)
         # file must exist in order to check whether it's a patch file
         elif os.path.isfile(path) and is_patch_file(path):
             res['patch_files'].append(path)
diff --git a/easybuild/framework/easyconfig/types.py b/easybuild/framework/easyconfig/types.py
index fd8f2e09a7..17b199cdc2 100644
--- a/easybuild/framework/easyconfig/types.py
+++ b/easybuild/framework/easyconfig/types.py
@@ -446,9 +446,11 @@ def to_checksums(checksums):
     res = []
     for checksum in checksums:
         # each list entry can be:
-        # * a string (MD5 checksum)
+        # * None (indicates no checksum)
+        # * a string (MD5 or SHA256 checksum)
         # * a tuple with 2 elements: checksum type + checksum value
         # * a list of checksums (i.e.
multiple checksums for a single file) + # * a dict (filename to checksum mapping) if isinstance(checksum, string_type): res.append(checksum) elif isinstance(checksum, (list, tuple)): @@ -462,6 +464,8 @@ def to_checksums(checksums): for key, value in checksum.items(): validated_dict[key] = to_checksums(value) res.append(validated_dict) + else: + res.append(checksum) return res diff --git a/easybuild/framework/extension.py b/easybuild/framework/extension.py index a27f81dd47..b44d5759fe 100644 --- a/easybuild/framework/extension.py +++ b/easybuild/framework/extension.py @@ -37,7 +37,7 @@ import os from easybuild.framework.easyconfig.easyconfig import resolve_template -from easybuild.framework.easyconfig.templates import template_constant_dict +from easybuild.framework.easyconfig.templates import TEMPLATE_NAMES_EASYBLOCK_RUN_STEP, template_constant_dict from easybuild.tools.build_log import EasyBuildError, raise_nosupport from easybuild.tools.filetools import change_dir from easybuild.tools.run import run_cmd @@ -111,6 +111,10 @@ def __init__(self, mself, ext, extra_params=None): # construct dict with template values that can be used self.cfg.template_values.update(template_constant_dict({'name': name, 'version': version})) + # Add install/builddir templates with values from master. + for name in TEMPLATE_NAMES_EASYBLOCK_RUN_STEP: + self.cfg.template_values[name[0]] = str(getattr(self.master, name[0], None)) + # list of source/patch files: we use an empty list as default value like in EasyBlock self.src = resolve_template(self.ext.get('src', []), self.cfg.template_values) self.patches = resolve_template(self.ext.get('patches', []), self.cfg.template_values) diff --git a/easybuild/main.py b/easybuild/main.py index 69c47a7293..415321dc9a 100644 --- a/easybuild/main.py +++ b/easybuild/main.py @@ -56,7 +56,8 @@ from easybuild.tools.config import find_last_log, get_repository, get_repositorypath, build_option from easybuild.tools.containers.common import containerize from easybuild.tools.docs import list_software -from easybuild.tools.filetools import adjust_permissions, cleanup, copy_file, copy_files, read_file, write_file +from easybuild.tools.filetools import adjust_permissions, cleanup, copy_file, copy_files, dump_index, load_index +from easybuild.tools.filetools import read_file, write_file from easybuild.tools.github import check_github, close_pr, new_branch_github, find_easybuild_easyconfig from easybuild.tools.github import install_github_token, list_prs, new_pr, new_pr_from_branch, merge_pr from easybuild.tools.github import sync_branch_with_develop, sync_pr_with_develop, update_branch, update_pr @@ -255,9 +256,16 @@ def main(args=None, logfile=None, do_build=None, testing=False, modtool=None): elif options.list_software: print(list_software(output_format=options.output_format, detailed=options.list_software == 'detailed')) + elif options.create_index: + print_msg("Creating index for %s..." 
% options.create_index, prefix=False)
+        index_fp = dump_index(options.create_index, max_age_sec=options.index_max_age)
+        index = load_index(options.create_index)
+        print_msg("Index created at %s (%d files)" % (index_fp, len(index)), prefix=False)
+
     # non-verbose cleanup after handling GitHub integration stuff or printing terse info
     early_stop_options = [
         options.check_github,
+        options.create_index,
         options.install_github_token,
         options.list_installed_software,
         options.list_software,
@@ -291,8 +299,12 @@
         eb_file = find_easybuild_easyconfig()
         orig_paths.append(eb_file)
 
-    # last path is target when --copy-ec is used, so remove that from the list
-    target_path = orig_paths.pop() if options.copy_ec else None
+    if len(orig_paths) == 1:
+        # if only one easyconfig file is specified, use current directory as target directory
+        target_path = os.getcwd()
+    elif orig_paths:
+        # last path is target when --copy-ec is used, so remove that from the list
+        target_path = orig_paths.pop() if options.copy_ec else None
 
     categorized_paths = categorize_files_by_type(orig_paths)
 
@@ -310,8 +322,12 @@
     if options.copy_ec:
         if len(determined_paths) == 1:
             copy_file(determined_paths[0], target_path)
-        else:
+            print_msg("%s copied to %s" % (os.path.basename(determined_paths[0]), target_path), prefix=False)
+        elif len(determined_paths) > 1:
             copy_files(determined_paths, target_path)
+            print_msg("%d file(s) copied to %s" % (len(determined_paths), target_path), prefix=False)
+        else:
+            raise EasyBuildError("One or more files to copy should be specified!")
 
     elif options.fix_deprecated_easyconfigs:
         fix_deprecated_easyconfigs(determined_paths)
diff --git a/easybuild/scripts/bootstrap_eb.py b/easybuild/scripts/bootstrap_eb.py
index cdb0afee25..2e8c487a2a 100644
--- a/easybuild/scripts/bootstrap_eb.py
+++ b/easybuild/scripts/bootstrap_eb.py
@@ -40,6 +40,7 @@
 (via http://dubroy.com/blog/so-you-want-to-install-a-python-package/)
 """
 
+import codecs
 import copy
 import glob
 import os
@@ -49,12 +50,19 @@
 import sys
 import tempfile
 import traceback
-import urllib2
 
 from distutils.version import LooseVersion
 from hashlib import md5
+from platform import python_version
 
+IS_PY3 = sys.version_info[0] == 3
 
-EB_BOOTSTRAP_VERSION = '20190922.01'
+if not IS_PY3:
+    import urllib2 as std_urllib
+else:
+    import urllib.request as std_urllib
+
+
+EB_BOOTSTRAP_VERSION = '20200203.01'
 
 # argparse preferrred, optparse deprecated >=2.7
 HAVE_ARGPARSE = False
@@ -68,7 +76,9 @@
 VSC_BASE = 'vsc-base'
 VSC_INSTALL = 'vsc-install'
 
-EASYBUILD_PACKAGES = [VSC_INSTALL, VSC_BASE, 'easybuild-framework', 'easybuild-easyblocks', 'easybuild-easyconfigs']
+# Python 3 is not supported by the vsc-* packages
+EASYBUILD_PACKAGES = (([] if IS_PY3 else [VSC_INSTALL, VSC_BASE]) +
+                      ['easybuild-framework', 'easybuild-easyblocks', 'easybuild-easyconfigs'])
 
 STAGE1_SUBDIR = 'eb_stage1'
@@ -127,8 +137,10 @@ def error(msg, exit=True):
 
 def mock_stdout_stderr():
     """Mock stdout/stderr channels"""
-    # cStringIO is only available in Python 2
-    from cStringIO import StringIO
+    try:
+        from cStringIO import StringIO
+    except ImportError:
+        from io import StringIO
     orig_stdout, orig_stderr = sys.stdout, sys.stderr
     sys.stdout.flush()
     sys.stdout = StringIO()
@@ -324,7 +336,7 @@ def check_setuptools():
     # check setuptools version
     try:
-        os.system(cmd_tmpl % "import 
setuptools; print(setuptools.__version__)") setuptools_ver = LooseVersion(open(outfile).read().strip()) debug("Found setuptools version %s" % setuptools_ver) @@ -336,7 +348,7 @@ def check_setuptools(): debug("Failed to check setuptools version: %s" % err) res = False - os.system(cmd_tmpl % "from setuptools.command import easy_install; print easy_install.__file__") + os.system(cmd_tmpl % "from setuptools.command import easy_install; print(easy_install.__file__)") out = open(outfile).read().strip() debug("Location of setuptools' easy_install module: %s" % out) if 'setuptools/command/easy_install' not in out: @@ -344,7 +356,7 @@ def check_setuptools(): res = False if res is None: - os.system(cmd_tmpl % "import setuptools; print setuptools.__file__") + os.system(cmd_tmpl % "import setuptools; print(setuptools.__file__)") setuptools_loc = open(outfile).read().strip() res = os.path.dirname(os.path.dirname(setuptools_loc)) debug("Location of setuptools installation: %s" % res) @@ -523,27 +535,32 @@ def stage1(tmpdir, sourcepath, distribute_egg_dir, forcedversion): # install meta-package easybuild from PyPI if forcedversion: cmd.append('easybuild==%s' % forcedversion) + elif IS_PY3: + cmd.append('easybuild>=4.0') # Python 3 support added in EasyBuild 4 else: cmd.append('easybuild') - # install vsc-base again at the end, to avoid that the one available on the system is used instead - post_vsc_base = cmd[:] - post_vsc_base[-1] = VSC_BASE + '<2.9.0' + if not IS_PY3: + # install vsc-base again at the end, to avoid that the one available on the system is used instead + post_vsc_base = cmd[:] + post_vsc_base[-1] = VSC_BASE + '<2.9.0' if not print_debug: cmd.insert(0, '--quiet') - # install vsc-install version prior to 0.11.4, where mock was introduced as a dependency - # workaround for problem reported in https://github.com/easybuilders/easybuild-framework/issues/2712 - # also stick to vsc-base < 2.9.0 to avoid requiring 'future' Python package as dependency - for pkg in [VSC_INSTALL + '<0.11.4', VSC_BASE + '<2.9.0']: - precmd = cmd[:-1] + [pkg] - info("running pre-install command 'easy_install %s'" % (' '.join(precmd))) - run_easy_install(precmd) + # There is no support for Python3 in the older vsc-* packages and EasyBuild 4 includes working versions of vsc-* + if not IS_PY3: + # install vsc-install version prior to 0.11.4, where mock was introduced as a dependency + # workaround for problem reported in https://github.com/easybuilders/easybuild-framework/issues/2712 + # also stick to vsc-base < 2.9.0 to avoid requiring 'future' Python package as dependency + for pkg in [VSC_INSTALL + '<0.11.4', VSC_BASE + '<2.9.0']: + precmd = cmd[:-1] + [pkg] + info("running pre-install command 'easy_install %s'" % (' '.join(precmd))) + run_easy_install(precmd) info("installing EasyBuild with 'easy_install %s'\n" % (' '.join(cmd))) syntax_error_note = '\n'.join([ - "Note: a 'SyntaxError' may be reported for the easybuild/tools/py2vs3/py3.py module.", + "Note: a 'SyntaxError' may be reported for the easybuild/tools/py2vs3/py%s.py module." 
% ('3', '2')[IS_PY3], "You can safely ignore this message, it will not affect the functionality of the EasyBuild installation.", '', ]) @@ -632,8 +649,13 @@ def stage1(tmpdir, sourcepath, distribute_egg_dir, forcedversion): # make sure we're getting the expected EasyBuild packages import easybuild.framework import easybuild.easyblocks - import vsc.utils.fancylogger - for pkg in [easybuild.framework, easybuild.easyblocks, vsc.utils.fancylogger]: + pkgs_to_check = [easybuild.framework, easybuild.easyblocks] + # vsc is part of EasyBuild 4 + if LooseVersion(eb_version) < LooseVersion('4'): + import vsc.utils.fancylogger + pkgs_to_check.append(vsc.utils.fancylogger) + + for pkg in pkgs_to_check: if tmpdir not in pkg.__file__: error("Found another %s than expected: %s" % (pkg.__name__, pkg.__file__)) else: @@ -698,8 +720,8 @@ def stage2(tmpdir, templates, install_path, distribute_egg_dir, sourcepath): # determine download URL via PyPI's 'simple' API pkg_simple = None try: - pkg_simple = urllib2.urlopen('https://pypi.python.org/simple/%s' % pkg, timeout=10).read() - except (urllib2.URLError, urllib2.HTTPError) as err: + pkg_simple = std_urllib.urlopen('https://pypi.python.org/simple/%s' % pkg, timeout=10).read() + except (std_urllib.URLError, std_urllib.HTTPError) as err: # failing to figure out the package download URl may be OK when source tarballs are provided if sourcepath: info("Ignoring failed attempt to determine '%s' download URL since source tarballs are provided" % pkg) @@ -707,6 +729,8 @@ def stage2(tmpdir, templates, install_path, distribute_egg_dir, sourcepath): raise err if pkg_simple: + if IS_PY3: + pkg_simple = pkg_simple.decode('utf-8') pkg_url_part_regex = re.compile('/(packages/[^#]+)/%s#' % pkg_filename) res = pkg_url_part_regex.search(pkg_simple) if res: @@ -827,6 +851,8 @@ def main(): """Main script: bootstrap EasyBuild in stages.""" self_txt = open(__file__).read() + if IS_PY3: + self_txt = self_txt.encode('utf-8') info("EasyBuild bootstrap script (version %s, MD5: %s)" % (EB_BOOTSTRAP_VERSION, md5(self_txt).hexdigest())) info("Found Python %s\n" % '; '.join(sys.version.split('\n'))) @@ -866,6 +892,9 @@ def main(): forcedversion = EASYBUILD_BOOTSTRAP_FORCE_VERSION if forcedversion: info("Forcing specified version %s..." 
% forcedversion) + if IS_PY3 and LooseVersion(forcedversion) < LooseVersion('4'): + error('Python 3 support is only available with EasyBuild 4.x but you are trying to install EasyBuild %s' + % forcedversion) # create temporary dir for temporary installations tmpdir = tempfile.mkdtemp() @@ -982,10 +1011,12 @@ def main(): """ # check Python version -if sys.version_info[0] != 2 or sys.version_info[1] < 6: - pyver = sys.version.split(' ')[0] - sys.stderr.write("ERROR: Incompatible Python version: %s (should be Python 2 >= 2.6)\n" % pyver) - sys.stderr.write("Please try again using 'python2 %s '\n" % os.path.basename(__file__)) +loose_pyver = LooseVersion(python_version()) +min_pyver2 = LooseVersion('2.6') +min_pyver3 = LooseVersion('3.5') +if loose_pyver < min_pyver2 or (loose_pyver >= LooseVersion('3') and loose_pyver < min_pyver3): + sys.stderr.write("ERROR: Incompatible Python version: %s (should be Python 2 >= %s or Python 3 >= %s)\n" + % (python_version(), min_pyver2, min_pyver3)) sys.exit(1) # distribute_setup.py script (https://pypi.python.org/pypi/distribute) @@ -1117,8 +1148,10 @@ def main(): T4E5Gl7wpTxDXdQtzS1Hv52qHSilmOtEVO3IVjCdl5cgC5VC9T6CY1N4U4B0E1tltaqRtuYc/PyB i9tGe6+O/V0LCkGXvNkrKK2++u9qLFyTkO2sp7xSt/Bfil9os3SeOlY5fvv9mLcFj5zSNUqsRZfU 7lwukTHLpfpLDH2GT+yCCf8D2cp1xw== - -""".decode("base64").decode("zlib") +""" +if IS_PY3: + DISTRIBUTE_SETUP_PY = DISTRIBUTE_SETUP_PY.encode('ascii') +DISTRIBUTE_SETUP_PY = codecs.decode(codecs.decode(DISTRIBUTE_SETUP_PY, "base64"), "zlib") # run main function as body of script main() diff --git a/easybuild/scripts/install-EasyBuild-develop.sh b/easybuild/scripts/install-EasyBuild-develop.sh index b5ea2eb3d1..4181d8c42a 100755 --- a/easybuild/scripts/install-EasyBuild-develop.sh +++ b/easybuild/scripts/install-EasyBuild-develop.sh @@ -28,20 +28,11 @@ github_clone_branch() echo "=== Cloning ${GITHUB_USERNAME}/${REPO} ..." git clone --branch "${BRANCH}" "git@github.com:${GITHUB_USERNAME}/${REPO}.git" - if [[ "$REPO" == "vsc"* ]] - then - echo "=== Adding and fetching HPC-UGent GitHub repository @ hpcugent/${REPO} ..." - cd "${REPO}" - git remote add "github_hpcugent" "git@github.com:hpcugent/${REPO}.git" - git fetch github_hpcugent - git branch --set-upstream-to "github_hpcugent/${BRANCH}" "${BRANCH}" - else - echo "=== Adding and fetching EasyBuilders GitHub repository @ easybuilders/${REPO} ..." - cd "${REPO}" - git remote add "github_easybuilders" "git@github.com:easybuilders/${REPO}.git" - git fetch github_easybuilders - git branch --set-upstream-to "github_easybuilders/${BRANCH}" "${BRANCH}" - fi + echo "=== Adding and fetching EasyBuilders GitHub repository @ easybuilders/${REPO} ..." 
+ cd "${REPO}"
+ git remote add "github_easybuilders" "git@github.com:easybuilders/${REPO}.git"
+ git fetch github_easybuilders
+ git branch --set-upstream-to "github_easybuilders/${BRANCH}" "${BRANCH}"
}
# Print the content of the module
@@ -72,8 +63,6 @@ conflict EasyBuild
prepend-path PATH "\$root/easybuild-framework"
-prepend-path PYTHONPATH "\$root/vsc-base/lib"
-prepend-path PYTHONPATH "\$root/vsc-install/lib"
prepend-path PYTHONPATH "\$root/easybuild-framework"
prepend-path PYTHONPATH "\$root/easybuild-easyblocks"
prepend-path PYTHONPATH "\$root/easybuild-easyconfigs"
@@ -112,10 +101,6 @@ mkdir -p "${INSTALL_DIR}"
cd "${INSTALL_DIR}"
INSTALL_DIR="${PWD}" # get the full path
-# Clone repository for vsc-base dependency with 'master' branch
-github_clone_branch "vsc-base" "master"
-github_clone_branch "vsc-install" "master"
-
# Clone code repositories with the 'develop' branch
github_clone_branch "easybuild-framework" "develop"
github_clone_branch "easybuild-easyblocks" "develop"
diff --git a/easybuild/scripts/install-EasyBuild-sprint.sh b/easybuild/scripts/install-EasyBuild-sprint.sh
new file mode 100755
index 0000000000..57a0a802db
--- /dev/null
+++ b/easybuild/scripts/install-EasyBuild-sprint.sh
@@ -0,0 +1,143 @@
+#!/usr/bin/env bash
+
+# Stop in case of error
+set -e
+
+###########################
+# Helper functions
+###########################
+
+# Print script help
+print_usage()
+{
+ echo "Usage: $0 "
+ echo
+ echo " github_username: username on GitHub for which the EasyBuild repositories should be cloned"
+ echo
+ echo " install_dir: directory where all the EasyBuild files will be installed"
+ echo
+ echo " easyconfigs_branch: easybuild-easyconfigs branch to check out"
+ echo
+}
+
+# Clone one branch
+github_clone_branch()
+{
+ REPO="$1"
+ BRANCH="$2"
+
+ cd "${INSTALL_DIR}"
+
+ # Check if BRANCH already exists in the ${GITHUB_USERNAME}/${REPO}
+ if [[ ! -z $(git ls-remote --heads "git@github.com:${GITHUB_USERNAME}/${REPO}.git" "${BRANCH}") ]]; then
+ echo "=== Cloning ${GITHUB_USERNAME}/${REPO} branch ${BRANCH} ..."
+ git clone --branch "${BRANCH}" "git@github.com:${GITHUB_USERNAME}/${REPO}.git"
+
+ echo "=== Adding and fetching EasyBuilders GitHub repository @ easybuilders/${REPO} ..."
+ cd "${REPO}"
+ git remote add "github_easybuilders" "git@github.com:easybuilders/${REPO}.git"
+ git fetch github_easybuilders
+ git branch --set-upstream-to "github_easybuilders/${BRANCH}" "${BRANCH}"
+ else
+ echo "=== Cloning ${GITHUB_USERNAME}/${REPO} ..."
+ git clone "git@github.com:${GITHUB_USERNAME}/${REPO}.git"
+
+ echo "=== Adding and fetching EasyBuilders GitHub repository @ easybuilders/${REPO} ..."
+ cd "${REPO}"
+ git remote add "github_easybuilders" "git@github.com:easybuilders/${REPO}.git"
+ git fetch github_easybuilders
+ git checkout -b "${BRANCH}" "github_easybuilders/${BRANCH}"
+ fi
+}
+
+# Print the content of the module
+print_devel_module()
+{
+cat < "${EB_DEVEL_MODULE}"
+echo
+echo "=== Run 'module use ${MODULES_INSTALL_DIR}' and 'module load ${EB_DEVEL_MODULE_NAME}' to use your development version of EasyBuild."
+echo "=== (you can append ${MODULES_INSTALL_DIR} to your MODULEPATH to make this module always available for loading)" +echo +echo "=== To update each repository, run 'git pull origin' in each subdirectory of ${INSTALL_DIR}" +echo + +exit 0 + + diff --git a/easybuild/toolchains/mpi/intelmpi.py b/easybuild/toolchains/mpi/intelmpi.py index e404bf72f8..b4811754dc 100644 --- a/easybuild/toolchains/mpi/intelmpi.py +++ b/easybuild/toolchains/mpi/intelmpi.py @@ -29,7 +29,11 @@ :author: Kenneth Hoste (Ghent University) """ +import os + import easybuild.tools.toolchain as toolchain + +from distutils.version import LooseVersion from easybuild.toolchains.mpi.mpich2 import Mpich2 from easybuild.tools.toolchain.constants import COMPILER_FLAGS, COMPILER_VARIABLES from easybuild.tools.toolchain.variables import CommandFlagList @@ -67,6 +71,23 @@ def _set_mpi_compiler_variables(self): super(IntelMPI, self)._set_mpi_compiler_variables() + def _set_mpi_variables(self): + """Set the other MPI variables""" + + super(IntelMPI, self)._set_mpi_variables() + + if (LooseVersion(self.version) >= LooseVersion('2019')): + lib_dir = [os.path.join('intel64', 'lib', 'release')] + incl_dir = [os.path.join('intel64', 'include')] + + for root in self.get_software_root(self.MPI_MODULE_NAME): + self.variables.append_exists('MPI_LIB_STATIC', root, lib_dir, + filename="lib%s.a" % self.MPI_LIBRARY_NAME) + self.variables.append_exists('MPI_LIB_SHARED', root, lib_dir, + filename="lib%s.so" % self.MPI_LIBRARY_NAME) + self.variables.append_exists('MPI_LIB_DIR', root, lib_dir) + self.variables.append_exists('MPI_INC_DIR', root, incl_dir) + MPI_LINK_INFO_OPTION = '-show' def set_variables(self): diff --git a/easybuild/tools/build_log.py b/easybuild/tools/build_log.py index 616c839531..ba45075069 100644 --- a/easybuild/tools/build_log.py +++ b/easybuild/tools/build_log.py @@ -358,10 +358,13 @@ def print_warning(msg, *args, **kwargs): if args: msg = msg % args + log = kwargs.pop('log', None) silent = kwargs.pop('silent', False) if kwargs: raise EasyBuildError("Unknown named arguments passed to print_warning: %s", kwargs) + if log: + log.warning(msg) if not silent: sys.stderr.write("\nWARNING: %s\n\n" % msg) diff --git a/easybuild/tools/config.py b/easybuild/tools/config.py index ab98bcad6d..0bcf31ab8b 100644 --- a/easybuild/tools/config.py +++ b/easybuild/tools/config.py @@ -78,6 +78,8 @@ CONT_TYPES = [CONT_TYPE_DOCKER, CONT_TYPE_SINGULARITY] DEFAULT_CONT_TYPE = CONT_TYPE_SINGULARITY +DEFAULT_BRANCH = 'develop' +DEFAULT_INDEX_MAX_AGE = 7 * 24 * 60 * 60 # 1 week (in seconds) DEFAULT_JOB_BACKEND = 'GC3Pie' DEFAULT_LOGFILE_FORMAT = ("easybuild", "easybuild-%(name)s-%(version)s-%(date)s.%(time)s.log") DEFAULT_MAX_FAIL_RATIO_PERMS = 0.5 @@ -111,6 +113,9 @@ FORCE_DOWNLOAD_CHOICES = [FORCE_DOWNLOAD_ALL, FORCE_DOWNLOAD_PATCHES, FORCE_DOWNLOAD_SOURCES] DEFAULT_FORCE_DOWNLOAD = FORCE_DOWNLOAD_SOURCES +# package name for generic easyblocks +GENERIC_EASYBLOCK_PKG = 'generic' + # general module class GENERAL_CLASS = 'all' @@ -184,6 +189,7 @@ def mk_full_default_path(name, prefix=DEFAULT_PREFIX): 'job_output_dir', 'job_polling_interval', 'job_target_resource', + 'locks_dir', 'modules_footer', 'modules_header', 'mpi_cmd_template', @@ -195,7 +201,6 @@ def mk_full_default_path(name, prefix=DEFAULT_PREFIX): 'pr_commit_msg', 'pr_descr', 'pr_target_account', - 'pr_target_branch', 'pr_target_repo', 'pr_title', 'rpath_filter', @@ -225,6 +230,8 @@ def mk_full_default_path(name, prefix=DEFAULT_PREFIX): 'group_writable_installdir', 'hidden', 
'ignore_checksums', + 'ignore_index', + 'ignore_locks', 'install_latest_eb_release', 'lib64_fallback_sanity_check', 'logtostdout', @@ -249,6 +256,7 @@ def mk_full_default_path(name, prefix=DEFAULT_PREFIX): 'use_f90cache', 'use_existing_modules', 'set_default_module', + 'wait_on_lock', ], True: [ 'cleanup_builddir', @@ -270,6 +278,12 @@ def mk_full_default_path(name, prefix=DEFAULT_PREFIX): DEFAULT_CONT_TYPE: [ 'container_type', ], + DEFAULT_BRANCH: [ + 'pr_target_branch', + ], + DEFAULT_INDEX_MAX_AGE: [ + 'index_max_age', + ], DEFAULT_MAX_FAIL_RATIO_PERMS: [ 'max_fail_ratio_adjust_permissions', ], diff --git a/easybuild/tools/containers/docker.py b/easybuild/tools/containers/docker.py index bb5c2eb09e..0da773a269 100644 --- a/easybuild/tools/containers/docker.py +++ b/easybuild/tools/containers/docker.py @@ -34,7 +34,7 @@ from easybuild.tools.config import DOCKER_BASE_IMAGE_CENTOS, DOCKER_BASE_IMAGE_UBUNTU from easybuild.tools.containers.base import ContainerGenerator from easybuild.tools.containers.utils import det_os_deps -from easybuild.tools.filetools import rmtree2 +from easybuild.tools.filetools import remove_dir from easybuild.tools.run import run_cmd @@ -157,4 +157,4 @@ def build_image(self, dockerfile): run_cmd(docker_cmd, path=tempdir, stream_output=True) print_msg("Docker image created at %s" % container_name, log=self.log) - rmtree2(tempdir) + remove_dir(tempdir) diff --git a/easybuild/tools/filetools.py b/easybuild/tools/filetools.py index e414ed68a7..8f357d9c6b 100644 --- a/easybuild/tools/filetools.py +++ b/easybuild/tools/filetools.py @@ -43,6 +43,8 @@ import fileinput import glob import hashlib +import imp +import inspect import os import re import shutil @@ -56,10 +58,10 @@ from easybuild.base import fancylogger from easybuild.tools import run # import build_log must stay, to use of EasyBuildLog -from easybuild.tools.build_log import EasyBuildError, dry_run_msg, print_msg -from easybuild.tools.config import build_option +from easybuild.tools.build_log import EasyBuildError, dry_run_msg, print_msg, print_warning +from easybuild.tools.config import GENERIC_EASYBLOCK_PKG, build_option from easybuild.tools.py2vs3 import std_urllib, string_type -from easybuild.tools.utilities import nub +from easybuild.tools.utilities import nub, remove_unwanted_chars try: import requests @@ -109,6 +111,7 @@ r'~': "_tilde_", } +PATH_INDEX_FILENAME = '.eb-path-index' CHECKSUM_TYPE_MD5 = 'md5' CHECKSUM_TYPE_SHA256 = 'sha256' @@ -241,6 +244,13 @@ def write_file(path, data, append=False, forced=False, backup=False, always_over raise EasyBuildError("Failed to write to %s: %s", path, err) +def is_binary(contents): + """ + Check whether given bytestring represents the contents of a binary file or not. + """ + return isinstance(contents, bytes) and b'\00' in bytes(contents) + + def resolve_path(path): """ Return fully resolved path for given path. @@ -296,11 +306,27 @@ def remove_dir(path): dry_run_msg("directory %s removed" % path, silent=build_option('silent')) return - try: - if os.path.exists(path): - rmtree2(path) - except OSError as err: - raise EasyBuildError("Failed to remove directory %s: %s", path, err) + if os.path.exists(path): + ok = False + errors = [] + # Try multiple times to cater for temporary failures on e.g. 
NFS mounted paths + max_attempts = 3 + for i in range(0, max_attempts): + try: + shutil.rmtree(path) + ok = True + break + except OSError as err: + _log.debug("Failed to remove path %s with shutil.rmtree at attempt %d: %s" % (path, i, err)) + errors.append(err) + time.sleep(2) + # make sure write permissions are enabled on entire directory + adjust_permissions(path, stat.S_IWUSR, add=True, recursive=True) + if ok: + _log.info("Path %s successfully removed." % path) + else: + raise EasyBuildError("Failed to remove directory %s even after %d attempts.\nReasons: %s", + path, max_attempts, errors) def remove(paths): @@ -589,6 +615,120 @@ def download_file(filename, url, path, forced=False): return None +def create_index(path, ignore_dirs=None): + """ + Create index for files in specified path. + """ + if ignore_dirs is None: + ignore_dirs = [] + + index = set() + + if not os.path.exists(path): + raise EasyBuildError("Specified path does not exist: %s", path) + elif not os.path.isdir(path): + raise EasyBuildError("Specified path is not a directory: %s", path) + + for (dirpath, dirnames, filenames) in os.walk(path, topdown=True, followlinks=True): + for filename in filenames: + # use relative paths in index + rel_dirpath = os.path.relpath(dirpath, path) + # avoid that relative paths start with './' + if rel_dirpath == '.': + rel_dirpath = '' + index.add(os.path.join(rel_dirpath, filename)) + + # do not consider (certain) hidden directories + # note: we still need to consider e.g., .local ! + # replace list elements using [:], so os.walk doesn't process deleted directories + # see https://stackoverflow.com/questions/13454164/os-walk-without-hidden-folders + dirnames[:] = [d for d in dirnames if d not in ignore_dirs] + + return index + + +def dump_index(path, max_age_sec=None): + """ + Create index for files in specified path, and dump it to file (alphabetically sorted). + """ + if max_age_sec is None: + max_age_sec = build_option('index_max_age') + + index_fp = os.path.join(path, PATH_INDEX_FILENAME) + index_contents = create_index(path) + + curr_ts = datetime.datetime.now() + if max_age_sec == 0: + end_ts = datetime.datetime.max + else: + end_ts = curr_ts + datetime.timedelta(0, max_age_sec) + + lines = [ + "# created at: %s" % str(curr_ts), + "# valid until: %s" % str(end_ts), + ] + lines.extend(sorted(index_contents)) + + write_file(index_fp, '\n'.join(lines), always_overwrite=False) + + return index_fp + + +def load_index(path, ignore_dirs=None): + """ + Load index for specified path, and return contents (or None if no index exists). 
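# A minimal standalone sketch of the retry pattern used by the reworked remove_dir above;
# MAX_ATTEMPTS and the RuntimeError are illustrative stand-ins, and the real helper also
# re-enables write permissions via adjust_permissions between attempts and raises EasyBuildError.
import shutil
import time

MAX_ATTEMPTS = 3  # illustrative; mirrors max_attempts in the patched remove_dir

def remove_dir_with_retries(path):
    """Remove a directory tree, retrying to ride out transient (e.g. NFS) failures."""
    errors = []
    for attempt in range(MAX_ATTEMPTS):
        try:
            shutil.rmtree(path)
            return
        except OSError as err:
            # remember the error and give the filesystem a moment before trying again
            errors.append(err)
            time.sleep(2)
    raise RuntimeError("Failed to remove %s after %d attempts: %s" % (path, MAX_ATTEMPTS, errors))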
+ """ + if ignore_dirs is None: + ignore_dirs = [] + + index_fp = os.path.join(path, PATH_INDEX_FILENAME) + index = set() + + if build_option('ignore_index'): + _log.info("Ignoring index for %s...", path) + + elif os.path.exists(index_fp): + lines = read_file(index_fp).splitlines() + + valid_ts_regex = re.compile("^# valid until: (.*)", re.M) + valid_ts = None + + for line in lines: + + # extract "valid until" timestamp, so we can check whether index is still valid + if valid_ts is None: + res = valid_ts_regex.match(line) + else: + res = None + + if res: + valid_ts = res.group(1) + try: + valid_ts = datetime.datetime.strptime(valid_ts, '%Y-%m-%d %H:%M:%S.%f') + except ValueError as err: + raise EasyBuildError("Failed to parse timestamp '%s' for index at %s: %s", valid_ts, path, err) + + elif line.startswith('#'): + _log.info("Ignoring unknown header line '%s' in index for %s", line, path) + + else: + # filter out files that are in an ignored directory + path_dirs = line.split(os.path.sep)[:-1] + if not any(d in path_dirs for d in ignore_dirs): + index.add(line) + + # check whether index is still valid + if valid_ts: + curr_ts = datetime.datetime.now() + if curr_ts > valid_ts: + print_warning("Index for %s is no longer valid (too old), so ignoring it...", path) + index = None + else: + print_msg("found valid index for %s, so using it...", path) + + return index or None + + def find_easyconfigs(path, ignore_dirs=None): """ Find .eb easyconfig files in path @@ -654,22 +794,26 @@ def search_file(paths, query, short=False, ignore_dirs=None, silent=False, filen if not terse: print_msg("Searching (case-insensitive) for '%s' in %s " % (query.pattern, path), log=_log, silent=silent) - for (dirpath, dirnames, filenames) in os.walk(path, topdown=True): - for filename in filenames: - if query.search(filename): - if not path_hits: - var = "CFGS%d" % var_index - var_index += 1 - if filename_only: - path_hits.append(filename) - else: - path_hits.append(os.path.join(dirpath, filename)) - - # do not consider (certain) hidden directories - # note: we still need to consider e.g., .local ! 
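# Hedged usage sketch of the index helpers introduced above (create_index, dump_index,
# load_index from the patched easybuild.tools.filetools); it assumes EasyBuild's configuration
# has been initialized, since the helpers consult build_option(), and the directory path is made up.
import os
from easybuild.tools.filetools import create_index, dump_index, load_index

robot_path = '/path/to/easybuild-easyconfigs/easybuild/easyconfigs'  # hypothetical location

index_fp = dump_index(robot_path, max_age_sec=7 * 24 * 60 * 60)  # cache the file list for a week
index = load_index(robot_path)  # None if there is no valid index, or if --ignore-index is set
if index is None:
    index = create_index(robot_path)

# the index holds paths relative to robot_path, so a search is just a filter over it
hits = sorted(p for p in index if 'GCC' in os.path.basename(p))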
- # replace list elements using [:], so os.walk doesn't process deleted directories - # see http://stackoverflow.com/questions/13454164/os-walk-without-hidden-folders - dirnames[:] = [d for d in dirnames if d not in ignore_dirs] + path_index = load_index(path, ignore_dirs=ignore_dirs) + if path_index is None or build_option('ignore_index'): + if os.path.exists(path): + _log.info("No index found for %s, creating one...", path) + path_index = create_index(path, ignore_dirs=ignore_dirs) + else: + path_index = [] + else: + _log.info("Index found for %s, so using it...", path) + + for filepath in path_index: + filename = os.path.basename(filepath) + if query.search(filename): + if not path_hits: + var = "CFGS%d" % var_index + var_index += 1 + if filename_only: + path_hits.append(filename) + else: + path_hits.append(os.path.join(path, filepath)) path_hits = sorted(path_hits) @@ -944,7 +1088,9 @@ def det_patched_files(path=None, txt=None, omit_ab_prefix=False, github=False, f patched_regex = re.compile(patched_regex, re.M) if path is not None: - txt = read_file(path) + # take into account that file may contain non-UTF-8 characters; + # so, read a byte string, and decode to UTF-8 string (ignoring any non-UTF-8 characters); + txt = read_file(path, mode='rb').decode('utf-8', 'replace') elif txt is None: raise EasyBuildError("Either a file path or a string representing a patch should be supplied") @@ -1374,22 +1520,8 @@ def path_matches(path, paths): def rmtree2(path, n=3): """Wrapper around shutil.rmtree to make it more robust when used on NFS mounted file systems.""" - ok = False - for i in range(0, n): - try: - shutil.rmtree(path) - ok = True - break - except OSError as err: - _log.debug("Failed to remove path %s with shutil.rmtree at attempt %d: %s" % (path, n, err)) - time.sleep(2) - - # make sure write permissions are enabled on entire directory - adjust_permissions(path, stat.S_IWUSR, add=True, recursive=True) - if not ok: - raise EasyBuildError("Failed to remove path %s with shutil.rmtree, even after %d attempts.", path, n) - else: - _log.info("Path %s successfully removed." 
% path) + _log.deprecated("Use 'remove_dir' rather than 'rmtree2'", '5.0') + remove_dir(path) def find_backup_name_candidate(src_file): @@ -1864,6 +1996,7 @@ def get_source_tarball_from_git(filename, targetdir, git_config): repo_name = git_config.pop('repo_name', None) commit = git_config.pop('commit', None) recursive = git_config.pop('recursive', False) + keep_git_dir = git_config.pop('keep_git_dir', False) # input validation of git_config dict if git_config: @@ -1912,7 +2045,10 @@ def get_source_tarball_from_git(filename, targetdir, git_config): run.run_cmd(' '.join(checkout_cmd), log_all=True, log_ok=False, simple=False, regexp=False, path=repo_name) # create an archive and delete the git repo directory - tar_cmd = ['tar', 'cfvz', targetpath, '--exclude', '.git', repo_name] + if keep_git_dir: + tar_cmd = ['tar', 'cfvz', targetpath, repo_name] + else: + tar_cmd = ['tar', 'cfvz', targetpath, '--exclude', '.git', repo_name] run.run_cmd(' '.join(tar_cmd), log_all=True, log_ok=False, simple=False, regexp=False) # cleanup (repo_name dir does not exist in dry run mode) @@ -1996,3 +2132,94 @@ def install_fake_vsc(): sys.path.insert(0, fake_vsc_path) return fake_vsc_path + + +def get_easyblock_class_name(path): + """Make sure file is an easyblock and get easyblock class name""" + fn = os.path.basename(path).split('.')[0] + mod = imp.load_source(fn, path) + clsmembers = inspect.getmembers(mod, inspect.isclass) + for cn, co in clsmembers: + if co.__module__ == mod.__name__: + ancestors = inspect.getmro(co) + if any(a.__name__ == 'EasyBlock' for a in ancestors): + return cn + return None + + +def is_generic_easyblock(easyblock): + """Return whether specified easyblock name is a generic easyblock or not.""" + + return easyblock and not easyblock.startswith(EASYBLOCK_CLASS_PREFIX) + + +def copy_easyblocks(paths, target_dir): + """ Find right location for easyblock file and copy it there""" + file_info = { + 'eb_names': [], + 'paths_in_repo': [], + 'new': [], + } + + subdir = os.path.join('easybuild', 'easyblocks') + if os.path.exists(os.path.join(target_dir, subdir)): + for path in paths: + cn = get_easyblock_class_name(path) + if not cn: + raise EasyBuildError("Could not determine easyblock class from file %s" % path) + + eb_name = remove_unwanted_chars(decode_class_name(cn).replace('-', '_')).lower() + + if is_generic_easyblock(cn): + pkgdir = GENERIC_EASYBLOCK_PKG + else: + pkgdir = eb_name[0] + + target_path = os.path.join(subdir, pkgdir, eb_name + '.py') + + full_target_path = os.path.join(target_dir, target_path) + file_info['eb_names'].append(eb_name) + file_info['paths_in_repo'].append(full_target_path) + file_info['new'].append(not os.path.exists(full_target_path)) + copy_file(path, full_target_path, force_in_dry_run=True) + + else: + raise EasyBuildError("Could not find %s subdir in %s", subdir, target_dir) + + return file_info + + +def copy_framework_files(paths, target_dir): + """ Find right location for framework file and copy it there""" + file_info = { + 'paths_in_repo': [], + 'new': [], + } + + paths = [os.path.abspath(path) for path in paths] + + framework_topdir = 'easybuild-framework' + + for path in paths: + target_path = None + dirnames = os.path.dirname(path).split(os.path.sep) + + if framework_topdir in dirnames: + # construct subdirectory by grabbing last entry in dirnames until we hit 'easybuild-framework' dir + subdirs = [] + while(dirnames[-1] != framework_topdir): + subdirs.insert(0, dirnames.pop()) + + parent_dir = os.path.join(*subdirs) if subdirs else '' + target_path = 
os.path.join(target_dir, parent_dir, os.path.basename(path)) + else: + raise EasyBuildError("Specified path '%s' does not include a '%s' directory!", path, framework_topdir) + + if target_path: + file_info['paths_in_repo'].append(target_path) + file_info['new'].append(not os.path.exists(target_path)) + copy_file(path, target_path) + else: + raise EasyBuildError("Couldn't find parent folder of updated file: %s", path) + + return file_info diff --git a/easybuild/tools/github.py b/easybuild/tools/github.py index 77dd8da0f5..d24d87e7ca 100644 --- a/easybuild/tools/github.py +++ b/easybuild/tools/github.py @@ -50,8 +50,9 @@ from easybuild.framework.easyconfig.parser import EasyConfigParser from easybuild.tools.build_log import EasyBuildError, print_msg, print_warning from easybuild.tools.config import build_option -from easybuild.tools.filetools import apply_patch, copy_dir, det_patched_files, download_file, extract_file -from easybuild.tools.filetools import mkdir, read_file, symlink, which, write_file +from easybuild.tools.filetools import apply_patch, copy_dir, copy_easyblocks, copy_framework_files +from easybuild.tools.filetools import det_patched_files, download_file, extract_file +from easybuild.tools.filetools import get_easyblock_class_name, mkdir, read_file, symlink, which, write_file from easybuild.tools.py2vs3 import HTTPError, URLError, ascii_letters, urlopen from easybuild.tools.systemtools import UNKNOWN, get_tool_version from easybuild.tools.utilities import nub, only_if_module_is_available @@ -85,7 +86,9 @@ GITHUB_API_URL = 'https://api.github.com' GITHUB_DIR_TYPE = u'dir' GITHUB_EB_MAIN = 'easybuilders' +GITHUB_EASYBLOCKS_REPO = 'easybuild-easyblocks' GITHUB_EASYCONFIGS_REPO = 'easybuild-easyconfigs' +GITHUB_FRAMEWORK_REPO = 'easybuild-framework' GITHUB_DEVELOP_BRANCH = 'develop' GITHUB_FILE_TYPE = u'file' GITHUB_PR_STATE_OPEN = 'open' @@ -251,7 +254,7 @@ def github_api_get_request(request_f, github_user=None, token=None, **kwargs): _log.warning("Error occurred while performing get request: %s", err) status, data = 0, None - _log.debug("get request result for %s: status: %d, data: %s", url, status, data) + _log.debug("get request result for %s: status: %d, data: %s", url.url, status, data) return (status, data) @@ -284,7 +287,7 @@ def github_api_put_request(request_f, github_user=None, token=None, **kwargs): else: raise EasyBuildError("FAILED: %s", data.get('message', "(unknown reason)")) - _log.debug("get request result for %s: status: %d, data: %s", url, status, data) + _log.debug("get request result for %s: status: %d, data: %s", url.url, status, data) return (status, data) @@ -369,13 +372,32 @@ def download_repo(repo=GITHUB_EASYCONFIGS_REPO, branch='master', account=GITHUB_ return extracted_path +def fetch_easyblocks_from_pr(pr, path=None, github_user=None): + """Fetch patched easyconfig files for a particular PR.""" + return fetch_files_from_pr(pr, path, github_user, github_repo=GITHUB_EASYBLOCKS_REPO) + + def fetch_easyconfigs_from_pr(pr, path=None, github_user=None): """Fetch patched easyconfig files for a particular PR.""" + return fetch_files_from_pr(pr, path, github_user, github_repo=GITHUB_EASYCONFIGS_REPO) + + +def fetch_files_from_pr(pr, path=None, github_user=None, github_repo=None): + """Fetch patched files for a particular PR.""" if github_user is None: github_user = build_option('github_user') + + if github_repo is None: + github_repo = GITHUB_EASYCONFIGS_REPO + if path is None: - path = build_option('pr_path') + if github_repo == GITHUB_EASYCONFIGS_REPO: + 
path = build_option('pr_path') + elif github_repo == GITHUB_EASYBLOCKS_REPO: + path = os.path.join(tempfile.gettempdir(), 'ebs_pr%s' % pr) + else: + raise EasyBuildError("Unknown repo: %s" % github_repo) if path is None: path = tempfile.mkdtemp() @@ -384,9 +406,17 @@ def fetch_easyconfigs_from_pr(pr, path=None, github_user=None): mkdir(path, parents=True) github_account = build_option('pr_target_account') - github_repo = GITHUB_EASYCONFIGS_REPO - _log.debug("Fetching easyconfigs from %s/%s PR #%s into %s", github_account, github_repo, pr, path) + if github_repo == GITHUB_EASYCONFIGS_REPO: + easyfiles = 'easyconfigs' + elif github_repo == GITHUB_EASYBLOCKS_REPO: + easyfiles = 'easyblocks' + else: + raise EasyBuildError("Don't know how to fetch files from repo %s", github_repo) + + subdir = os.path.join('easybuild', easyfiles) + + _log.debug("Fetching %s from %s/%s PR #%s into %s", easyfiles, github_account, github_repo, pr, path) pr_data, _ = fetch_pr_data(pr, github_account, github_repo, github_user) pr_merged = pr_data['merged'] @@ -429,12 +459,12 @@ def fetch_easyconfigs_from_pr(pr, path=None, github_user=None): if final_path is None: if pr_closed: - print_warning("Using easyconfigs from closed PR #%s" % pr) + print_warning("Using %s from closed PR #%s" % (easyfiles, pr)) # obtain most recent version of patched files - for patched_file in patched_files: + for patched_file in [f for f in patched_files if subdir in f]: # path to patch file, incl. subdir it is in - fn = os.path.sep.join(patched_file.split(os.path.sep)[-3:]) + fn = patched_file.split(subdir)[1].strip(os.path.sep) sha = pr_data['head']['sha'] full_url = URL_SEPARATOR.join([GITHUB_RAW, github_account, github_repo, sha, patched_file]) _log.info("Downloading %s from %s", fn, full_url) @@ -444,21 +474,21 @@ def fetch_easyconfigs_from_pr(pr, path=None, github_user=None): # symlink directories into expected place if they're not there yet if final_path != path: - dirpath = os.path.join(final_path, 'easybuild', 'easyconfigs') + dirpath = os.path.join(final_path, subdir) for eb_dir in os.listdir(dirpath): symlink(os.path.join(dirpath, eb_dir), os.path.join(path, os.path.basename(eb_dir))) # sanity check: make sure all patched files are downloaded - ec_files = [] - for patched_file in [f for f in patched_files if not f.startswith('test/')]: - fn = os.path.sep.join(patched_file.split(os.path.sep)[-3:]) + files = [] + for patched_file in [f for f in patched_files if subdir in f]: + fn = patched_file.split(easyfiles)[1].strip(os.path.sep) full_path = os.path.join(path, fn) if os.path.exists(full_path): - ec_files.append(full_path) + files.append(full_path) else: raise EasyBuildError("Couldn't find path to patched file %s", full_path) - return ec_files + return files def create_gist(txt, fn, descr=None, github_user=None, github_token=None): @@ -673,8 +703,8 @@ def _easyconfigs_pr_common(paths, ecs, start_branch=None, pr_branch=None, start_ # we need files to create the PR with non_existing_paths = [] ec_paths = [] - if paths['easyconfigs']: - for path in paths['easyconfigs']: + if paths['easyconfigs'] or paths['py_files']: + for path in paths['easyconfigs'] + paths['py_files']: if not os.path.exists(path): non_existing_paths.append(path) else: @@ -686,14 +716,16 @@ def _easyconfigs_pr_common(paths, ecs, start_branch=None, pr_branch=None, start_ if not any(paths.values()): raise EasyBuildError("No paths specified") - pr_target_repo = build_option('pr_target_repo') + pr_target_repo = det_pr_target_repo(paths) + if pr_target_repo is None: + 
raise EasyBuildError("Failed to determine target repository, please specify it via --pr-target-repo!") # initialize repository git_working_dir = tempfile.mkdtemp(prefix='git-working-dir') git_repo = init_repo(git_working_dir, pr_target_repo) repo_path = os.path.join(git_working_dir, pr_target_repo) - if pr_target_repo != GITHUB_EASYCONFIGS_REPO: + if pr_target_repo not in [GITHUB_EASYCONFIGS_REPO, GITHUB_EASYBLOCKS_REPO, GITHUB_FRAMEWORK_REPO]: raise EasyBuildError("Don't know how to create/update a pull request to the %s repository", pr_target_repo) if start_account is None: @@ -717,21 +749,23 @@ def _easyconfigs_pr_common(paths, ecs, start_branch=None, pr_branch=None, start_ # copy easyconfig files to right place target_dir = os.path.join(git_working_dir, pr_target_repo) - print_msg("copying easyconfigs to %s..." % target_dir) - file_info = copy_easyconfigs(ec_paths, target_dir) + print_msg("copying files to %s..." % target_dir) + file_info = COPY_FUNCTIONS[pr_target_repo](ec_paths, os.path.join(git_working_dir, pr_target_repo)) # figure out commit message to use if commit_msg: cnt = len(file_info['paths_in_repo']) - _log.debug("Using specified commit message for all %d new/modified easyconfigs at once: %s", cnt, commit_msg) - elif all(file_info['new']) and not paths['files_to_delete']: + _log.debug("Using specified commit message for all %d new/modified files at once: %s", cnt, commit_msg) + elif pr_target_repo == GITHUB_EASYCONFIGS_REPO and all(file_info['new']) and not paths['files_to_delete']: # automagically derive meaningful commit message if all easyconfig files are new commit_msg = "adding easyconfigs: %s" % ', '.join(os.path.basename(p) for p in file_info['paths_in_repo']) if paths['patch_files']: commit_msg += " and patches: %s" % ', '.join(os.path.basename(p) for p in paths['patch_files']) + elif pr_target_repo == GITHUB_EASYBLOCKS_REPO and all(file_info['new']): + commit_msg = "adding easyblocks: %s" % ', '.join(os.path.basename(p) for p in file_info['paths_in_repo']) else: raise EasyBuildError("A meaningful commit message must be specified via --pr-commit-msg when " - "modifying/deleting easyconfigs") + "modifying/deleting files or targeting the framework repo.") # figure out to which software name patches relate, and copy them to the right place if paths['patch_files']: @@ -776,7 +810,7 @@ def _easyconfigs_pr_common(paths, ecs, start_branch=None, pr_branch=None, start_ # checkout target branch if pr_branch is None: - if ec_paths: + if ec_paths and pr_target_repo == GITHUB_EASYCONFIGS_REPO: label = file_info['ecs'][0].name + re.sub('[.-]', '', file_info['ecs'][0].version) else: label = ''.join(random.choice(ascii_letters) for _ in range(10)) @@ -815,7 +849,7 @@ def _easyconfigs_pr_common(paths, ecs, start_branch=None, pr_branch=None, start_ push_branch_to_github(git_repo, target_account, pr_target_repo, pr_branch) - return file_info, deleted_paths, git_repo, pr_branch, diff_stat + return file_info, deleted_paths, git_repo, pr_branch, diff_stat, pr_target_repo def create_remote(git_repo, account, repo, https=False): @@ -991,9 +1025,10 @@ def not_eligible(msg): target = '%s/%s' % (pr_data['base']['repo']['owner']['login'], pr_data['base']['repo']['name']) print_msg("Checking eligibility of %s PR #%s for merging..." 
% (target, pr_data['number']), prefix=False) - # check target branch, must be 'develop' - msg_tmpl = "* targets develop branch: %s" - if pr_data['base']['ref'] == 'develop': + # check target branch, must be branch name specified in --pr-target-branch (usually 'develop') + pr_target_branch = build_option('pr_target_branch') + msg_tmpl = "* targets %s branch: %%s" % pr_target_branch + if pr_data['base']['ref'] == pr_target_branch: print_msg(msg_tmpl % 'OK', prefix=False) else: res = not_eligible(msg_tmpl % "FAILED; found '%s'" % pr_data['base']['ref']) @@ -1143,7 +1178,7 @@ def close_pr(pr, motivation_msg=None): raise EasyBuildError("GitHub user must be specified to use --close-pr") pr_target_account = build_option('pr_target_account') - pr_target_repo = build_option('pr_target_repo') + pr_target_repo = build_option('pr_target_repo') or GITHUB_EASYCONFIGS_REPO pr_data, _ = fetch_pr_data(pr, pr_target_account, pr_target_repo, github_user, full=True) @@ -1216,7 +1251,7 @@ def list_prs(params, per_page=GITHUB_MAX_PER_PAGE, github_user=None): print_msg("Listing PRs with parameters: %s" % ', '.join(k + '=' + str(parameters[k]) for k in sorted(parameters))) pr_target_account = build_option('pr_target_account') - pr_target_repo = build_option('pr_target_repo') + pr_target_repo = build_option('pr_target_repo') or GITHUB_EASYCONFIGS_REPO pr_data, _ = fetch_pr_data(None, pr_target_account, pr_target_repo, github_user, **parameters) @@ -1236,7 +1271,7 @@ def merge_pr(pr): raise EasyBuildError("GitHub user must be specified to use --merge-pr") pr_target_account = build_option('pr_target_account') - pr_target_repo = build_option('pr_target_repo') + pr_target_repo = build_option('pr_target_repo') or GITHUB_EASYCONFIGS_REPO pr_data, pr_url = fetch_pr_data(pr, pr_target_account, pr_target_repo, github_user, full=True) @@ -1276,11 +1311,10 @@ def new_branch_github(paths, ecs, commit_msg=None): """ Create new branch on GitHub using specified files - :param paths: paths to categorized lists of files (easyconfigs, files to delete, patches) + :param paths: paths to categorized lists of files (easyconfigs, files to delete, patches, files with .py extension) :param ecs: list of parsed easyconfigs, incl. for dependencies (if robot is enabled) :param commit_msg: commit message to use """ - branch_name = build_option('pr_branch_name') if commit_msg is None: commit_msg = build_option('pr_commit_msg') @@ -1292,14 +1326,15 @@ def new_branch_github(paths, ecs, commit_msg=None): @only_if_module_is_available('git', pkgname='GitPython') -def new_pr_from_branch(branch_name, title=None, descr=None, pr_metadata=None): +def new_pr_from_branch(branch_name, title=None, descr=None, pr_target_repo=None, pr_metadata=None): """ Create new pull request from specified branch on GitHub. 
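# The generalized fetch_files_from_pr above picks a repo-specific subdirectory and default
# download path; a self-contained sketch of that dispatch is given below (det_pr_files_location
# is an illustrative name, the repo constants mirror the ones defined in easybuild.tools.github).
import os
import tempfile

GITHUB_EASYCONFIGS_REPO = 'easybuild-easyconfigs'
GITHUB_EASYBLOCKS_REPO = 'easybuild-easyblocks'

def det_pr_files_location(github_repo, pr, pr_path=None):
    """Return (subdir within repo, default download path) for fetching files from a PR."""
    if github_repo == GITHUB_EASYCONFIGS_REPO:
        easyfiles, path = 'easyconfigs', pr_path
    elif github_repo == GITHUB_EASYBLOCKS_REPO:
        easyfiles, path = 'easyblocks', os.path.join(tempfile.gettempdir(), 'ebs_pr%s' % pr)
    else:
        raise ValueError("Don't know how to fetch files from repo %s" % github_repo)
    return os.path.join('easybuild', easyfiles), path

# e.g. det_pr_files_location(GITHUB_EASYBLOCKS_REPO, 1234) -> ('easybuild/easyblocks', '/tmp/ebs_pr1234')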
""" pr_target_account = build_option('pr_target_account') pr_target_branch = build_option('pr_target_branch') - pr_target_repo = build_option('pr_target_repo') + if pr_target_repo is None: + pr_target_repo = build_option('pr_target_repo') or GITHUB_EASYCONFIGS_REPO # fetch GitHub token (required to perform actions on GitHub) github_user = build_option('github_user') @@ -1391,52 +1426,60 @@ def new_pr_from_branch(branch_name, title=None, descr=None, pr_metadata=None): file_info = det_file_info(ec_paths, target_dir) - # label easyconfigs for new software and/or new easyconfigs for existing software labels = [] - if any(file_info['new_folder']): - labels.append('new') - if any(file_info['new_file_in_existing_folder']): - labels.append('update') - - # only use most common toolchain(s) in toolchain label of PR title - toolchains = ['%(name)s/%(version)s' % ec['toolchain'] for ec in file_info['ecs']] - toolchains_counted = sorted([(toolchains.count(tc), tc) for tc in nub(toolchains)]) - toolchain_label = ','.join([tc for (cnt, tc) in toolchains_counted if cnt == toolchains_counted[-1][0]]) - - # only use most common module class(es) in moduleclass label of PR title - classes = [ec['moduleclass'] for ec in file_info['ecs']] - classes_counted = sorted([(classes.count(c), c) for c in nub(classes)]) - class_label = ','.join([tc for (cnt, tc) in classes_counted if cnt == classes_counted[-1][0]]) + if pr_target_repo == GITHUB_EASYCONFIGS_REPO: + # label easyconfigs for new software and/or new easyconfigs for existing software + if any(file_info['new_folder']): + labels.append('new') + if any(file_info['new_file_in_existing_folder']): + labels.append('update') + + # only use most common toolchain(s) in toolchain label of PR title + toolchains = ['%(name)s/%(version)s' % ec['toolchain'] for ec in file_info['ecs']] + toolchains_counted = sorted([(toolchains.count(tc), tc) for tc in nub(toolchains)]) + toolchain_label = ','.join([tc for (cnt, tc) in toolchains_counted if cnt == toolchains_counted[-1][0]]) + + # only use most common module class(es) in moduleclass label of PR title + classes = [ec['moduleclass'] for ec in file_info['ecs']] + classes_counted = sorted([(classes.count(c), c) for c in nub(classes)]) + class_label = ','.join([tc for (cnt, tc) in classes_counted if cnt == classes_counted[-1][0]]) + elif pr_target_repo == GITHUB_EASYBLOCKS_REPO: + if any(file_info['new']): + labels.append('new') if title is None: + if pr_target_repo == GITHUB_EASYCONFIGS_REPO: + if file_info['ecs'] and all(file_info['new']) and not deleted_paths: + # mention software name/version in PR title (only first 3) + names_and_versions = nub(["%s v%s" % (ec.name, ec.version) for ec in file_info['ecs']]) + if len(names_and_versions) <= 3: + main_title = ', '.join(names_and_versions) + else: + main_title = ', '.join(names_and_versions[:3] + ['...']) + + title = "{%s}[%s] %s" % (class_label, toolchain_label, main_title) + + # if Python is listed as a dependency, then mention Python version(s) in PR title + pyver = [] + for ec in file_info['ecs']: + # iterate over all dependencies (incl. 
build dependencies & multi-deps) + for dep in ec.dependencies(): + if dep['name'] == 'Python': + # check whether Python is listed as a multi-dep if it's marked as a build dependency + if dep['build_only'] and 'Python' not in ec['multi_deps']: + continue + else: + pyver.append(dep['version']) + if pyver: + title += " w/ Python %s" % ' + '.join(sorted(nub(pyver))) + elif pr_target_repo == GITHUB_EASYBLOCKS_REPO: + if file_info['eb_names'] and all(file_info['new']) and not deleted_paths: + plural = 's' if len(file_info['eb_names']) > 1 else '' + title = "new easyblock%s for %s" % (plural, (', '.join(file_info['eb_names']))) - if file_info['ecs'] and all(file_info['new']) and not deleted_paths: - # mention software name/version in PR title (only first 3) - names_and_versions = nub(["%s v%s" % (ec.name, ec.version) for ec in file_info['ecs']]) - if len(names_and_versions) <= 3: - main_title = ', '.join(names_and_versions) - else: - main_title = ', '.join(names_and_versions[:3] + ['...']) - - title = "{%s}[%s] %s" % (class_label, toolchain_label, main_title) - - # if Python is listed as a dependency, then mention Python version(s) in PR title - pyver = [] - for ec in file_info['ecs']: - # iterate over all dependencies (incl. build dependencies & multi-deps) - for dep in ec.dependencies(): - if dep['name'] == 'Python': - # check whether Python is listed as a multi-dep if it's marked as a build dependency - if dep['build_only'] and 'Python' not in ec['multi_deps']: - continue - else: - pyver.append(dep['version']) - if pyver: - title += " w/ Python %s" % ' + '.join(sorted(nub(pyver))) - - else: - raise EasyBuildError("Don't know how to make a PR title for this PR. " - "Please include a title (use --pr-title)") + if title is None: + raise EasyBuildError("Don't know how to make a PR title for this PR. 
" + "Please include a title (use --pr-title)") full_descr = "(created using `eb --new-pr`)\n" if descr is not None: @@ -1446,7 +1489,6 @@ def new_pr_from_branch(branch_name, title=None, descr=None, pr_metadata=None): pr_target_branch = build_option('pr_target_branch') dry_run = build_option('dry_run') or build_option('extended_dry_run') - pr_target_repo = build_option('pr_target_repo') msg = '\n'.join([ '', "Opening pull request%s" % ('', " [DRY RUN]")[dry_run], @@ -1510,12 +1552,13 @@ def new_pr(paths, ecs, title=None, descr=None, commit_msg=None): # create new branch in GitHub res = new_branch_github(paths, ecs, commit_msg=commit_msg) - file_info, deleted_paths, _, branch_name, diff_stat = res + file_info, deleted_paths, _, branch_name, diff_stat, pr_target_repo = res - new_pr_from_branch(branch_name, title=title, descr=descr, pr_metadata=(file_info, deleted_paths, diff_stat)) + new_pr_from_branch(branch_name, title=title, descr=descr, pr_target_repo=pr_target_repo, + pr_metadata=(file_info, deleted_paths, diff_stat)) -def det_account_branch_for_pr(pr_id, github_user=None): +def det_account_branch_for_pr(pr_id, github_user=None, pr_target_repo=None): """Determine account & branch corresponding to pull request with specified id.""" if github_user is None: @@ -1525,7 +1568,8 @@ def det_account_branch_for_pr(pr_id, github_user=None): raise EasyBuildError("GitHub username (--github-user) must be specified!") pr_target_account = build_option('pr_target_account') - pr_target_repo = build_option('pr_target_repo') + if pr_target_repo is None: + pr_target_repo = build_option('pr_target_repo') or GITHUB_EASYCONFIGS_REPO pr_data, _ = fetch_pr_data(pr_id, pr_target_account, pr_target_repo, github_user) @@ -1538,6 +1582,47 @@ def det_account_branch_for_pr(pr_id, github_user=None): return account, branch +def det_pr_target_repo(paths): + """Determine target repository for pull request from given cagetorized list of files + + :param paths: paths to categorized lists of files (easyconfigs, files to delete, patches, .py files) + """ + pr_target_repo = build_option('pr_target_repo') + + # determine target repository for PR based on which files are provided + # (see categorize_files_by_type function) + if pr_target_repo is None: + + _log.info("Trying to derive target repository based on specified files...") + + easyconfigs, files_to_delete, patch_files, py_files = [paths[key] for key in sorted(paths.keys())] + + # Python files provided, and no easyconfig files or patches + if py_files and not (easyconfigs or patch_files): + + _log.info("Only Python files provided, no easyconfig files or patches...") + + # if all Python files are easyblocks, target repo should be easyblocks; + # otherwise, target repo is assumed to be framework + if all([get_easyblock_class_name(path) for path in py_files]): + pr_target_repo = GITHUB_EASYBLOCKS_REPO + _log.info("All Python files are easyblocks, target repository is assumed to be %s", pr_target_repo) + else: + pr_target_repo = GITHUB_FRAMEWORK_REPO + _log.info("Not all Python files are easyblocks, target repository is assumed to be %s", pr_target_repo) + + # if no Python files are provided, only easyconfigs & patches, or if files to delete are .eb files, + # then target repo is assumed to be easyconfigs + elif easyconfigs or patch_files or (files_to_delete and all(x.endswith('.eb') for x in files_to_delete)): + pr_target_repo = GITHUB_EASYCONFIGS_REPO + _log.info("Only easyconfig and patch files found, target repository is assumed to be %s", pr_target_repo) + + else: + 
_log.info("No Python files, easyconfigs or patches found, can't derive target repository...") + + return pr_target_repo + + @only_if_module_is_available('git', pkgname='GitPython') def update_branch(branch_name, paths, ecs, github_account=None, commit_msg=None): """ @@ -1557,12 +1642,13 @@ def update_branch(branch_name, paths, ecs, github_account=None, commit_msg=None) if github_account is None: github_account = build_option('github_user') or build_option('github_org') - _, _, _, _, diff_stat = _easyconfigs_pr_common(paths, ecs, start_branch=branch_name, pr_branch=branch_name, - start_account=github_account, commit_msg=commit_msg) + _, _, _, _, diff_stat, pr_target_repo = _easyconfigs_pr_common(paths, ecs, start_branch=branch_name, + pr_branch=branch_name, start_account=github_account, + commit_msg=commit_msg) print_msg("Overview of changes:\n%s\n" % diff_stat, log=_log, prefix=False) - full_repo = '%s/%s' % (github_account, build_option('pr_target_repo')) + full_repo = '%s/%s' % (github_account, pr_target_repo) msg = "pushed updated branch '%s' to %s" % (branch_name, full_repo) if build_option('dry_run') or build_option('extended_dry_run'): msg += " [DRY RUN]" @@ -1580,17 +1666,44 @@ def update_pr(pr_id, paths, ecs, commit_msg=None): :param commit_msg: commit message to use """ - github_account, branch_name = det_account_branch_for_pr(pr_id) + pr_target_repo = det_pr_target_repo(paths) + if pr_target_repo is None: + raise EasyBuildError("Failed to determine target repository, please specify it via --pr-target-repo!") + + github_account, branch_name = det_account_branch_for_pr(pr_id, pr_target_repo=pr_target_repo) update_branch(branch_name, paths, ecs, github_account=github_account, commit_msg=commit_msg) - full_repo = '%s/%s' % (build_option('pr_target_account'), build_option('pr_target_repo')) + full_repo = '%s/%s' % (build_option('pr_target_account'), pr_target_repo) msg = "updated https://github.com/%s/pull/%s" % (full_repo, pr_id) if build_option('dry_run') or build_option('extended_dry_run'): msg += " [DRY RUN]" print_msg(msg, log=_log) +def check_online_status(): + """ + Check whether we currently are online + Return True if online, else a list of error messages + """ + # Try repeatedly and with different URLs to cater for flaky servers + # E.g. Github returned "HTTP Error 403: Forbidden" and "HTTP Error 406: Not Acceptable" randomly + # Timeout and repeats set to total 1 minute + urls = [GITHUB_API_URL + '/rate_limit', GITHUB_URL, GITHUB_API_URL] + num_repeats = 6 + errors = set() # Use set to record only unique errors + for attempt in range(num_repeats): + # Cycle through URLs + url = urls[attempt % len(urls)] + try: + urlopen(url, timeout=10) + errors = None + break + except URLError as err: + errors.add('%s: %s' % (url, err)) + return sorted(errors) if errors else True + + def check_github(): """ Check status of GitHub integration, and report back. @@ -1601,6 +1714,8 @@ def check_github(): * check whether creating gists works * check whether location to local working directories for Git repositories is available (not strictly needed) """ + debug = build_option('debug') + # start by assuming that everything works, individual checks will disable action that won't work status = {} for action in ['--from-pr', '--new-pr', '--review-pr', '--upload-test-report', '--update-pr']: @@ -1609,12 +1724,12 @@ def check_github(): print_msg("\nChecking status of GitHub integration...\n", log=_log, prefix=False) # check whether we're online; if not, half of the checks are going to fail... 
- try: - print_msg("Making sure we're online...", log=_log, prefix=False, newline=False) - urlopen(GITHUB_URL, timeout=5) + print_msg("Making sure we're online...", log=_log, prefix=False, newline=False) + online_state = check_online_status() + if online_state is True: print_msg("OK\n", log=_log, prefix=False) - except URLError as err: - print_msg("FAIL") + else: + print_msg("FAIL (%s)", ', '.join(online_state), log=_log, prefix=False) raise EasyBuildError("checking status of GitHub integration must be done online") # GitHub user @@ -1693,9 +1808,9 @@ def check_github(): git_repo, res, push_err = None, None, None branch_name = 'test_branch_%s' % ''.join(random.choice(ascii_letters) for _ in range(5)) try: - git_repo = init_repo(git_working_dir, GITHUB_EASYCONFIGS_REPO, silent=True) + git_repo = init_repo(git_working_dir, GITHUB_EASYCONFIGS_REPO, silent=not debug) remote_name = setup_repo(git_repo, github_account, GITHUB_EASYCONFIGS_REPO, 'master', - silent=True, git_only=True) + silent=not debug, git_only=True) git_repo.create_head(branch_name) res = getattr(git_repo.remotes, remote_name).push(branch_name) except Exception as err: @@ -1726,12 +1841,11 @@ def check_github(): print_msg(check_res, log=_log, prefix=False) # cleanup: delete test branch that was pushed to GitHub - if git_repo: + if git_repo and push_err is None: try: - if git_repo and hasattr(git_repo, 'remotes') and hasattr(git_repo.remotes, 'origin'): - git_repo.remotes.origin.push(branch_name, delete=True) + getattr(git_repo.remotes, remote_name).push(branch_name, delete=True) except GitCommandError as err: - sys.stderr.write("WARNNIG: failed to delete test branch from GitHub: %s\n" % err) + sys.stderr.write("WARNING: failed to delete test branch from GitHub: %s\n" % err) # test creating a gist print_msg("* creating gists...", log=_log, prefix=False, newline=False) @@ -2015,7 +2129,7 @@ def sync_pr_with_develop(pr_id): raise EasyBuildError("GitHub user must be specified to use --sync-pr-with-develop") target_account = build_option('pr_target_account') - target_repo = build_option('pr_target_repo') + target_repo = build_option('pr_target_repo') or GITHUB_EASYCONFIGS_REPO pr_account, pr_branch = det_account_branch_for_pr(pr_id) @@ -2038,7 +2152,7 @@ def sync_branch_with_develop(branch_name): raise EasyBuildError("GitHub user must be specified to use --sync-branch-with-develop") target_account = build_option('pr_target_account') - target_repo = build_option('pr_target_repo') + target_repo = build_option('pr_target_repo') or GITHUB_EASYCONFIGS_REPO # initialize repository git_working_dir = tempfile.mkdtemp(prefix='git-working-dir') @@ -2053,3 +2167,11 @@ def sync_branch_with_develop(branch_name): # push updated branch back to GitHub (unless we're doing a dry run) return push_branch_to_github(git_repo, github_account, target_repo, branch_name) + + +# copy functions for --new-pr +COPY_FUNCTIONS = { + GITHUB_EASYCONFIGS_REPO: copy_easyconfigs, + GITHUB_EASYBLOCKS_REPO: copy_easyblocks, + GITHUB_FRAMEWORK_REPO: copy_framework_files, +} diff --git a/easybuild/tools/include.py b/easybuild/tools/include.py index 90b9715280..2e85d99e20 100644 --- a/easybuild/tools/include.py +++ b/easybuild/tools/include.py @@ -31,6 +31,7 @@ import os import re import sys +import tempfile from easybuild.base import fancylogger from easybuild.tools.build_log import EasyBuildError @@ -147,14 +148,17 @@ def is_software_specific_easyblock(module): def include_easyblocks(tmpdir, paths): """Include generic and software-specific easyblocks found in specified 
locations.""" - easyblocks_path = os.path.join(tmpdir, 'included-easyblocks') + easyblocks_path = tempfile.mkdtemp(dir=tmpdir, prefix='included-easyblocks-') set_up_eb_package(easyblocks_path, 'easybuild.easyblocks', subpkgs=['generic'], pkg_init_body=EASYBLOCKS_PKG_INIT_BODY) easyblocks_dir = os.path.join(easyblocks_path, 'easybuild', 'easyblocks') - allpaths = [p for p in expand_glob_paths(paths) if os.path.basename(p) != '__init__.py'] + allpaths = [p for p in expand_glob_paths(paths) + if os.path.basename(p).endswith('.py') and + os.path.basename(p) != '__init__.py'] + for easyblock_module in allpaths: filename = os.path.basename(easyblock_module) diff --git a/easybuild/tools/options.py b/easybuild/tools/options.py index d149ee3d79..89af72c9f6 100644 --- a/easybuild/tools/options.py +++ b/easybuild/tools/options.py @@ -55,13 +55,14 @@ from easybuild.framework.easyconfig.format.pyheaderconfigobj import build_easyconfig_constants_dict from easybuild.framework.easyconfig.format.yeb import YEB_FORMAT_EXTENSION from easybuild.framework.easyconfig.tools import alt_easyconfig_paths, get_paths_for +from easybuild.toolchains.compiler.systemcompiler import TC_CONSTANT_SYSTEM from easybuild.tools import build_log, run # build_log should always stay there, to ensure EasyBuildLog from easybuild.tools.build_log import DEVEL_LOG_LEVEL, EasyBuildError from easybuild.tools.build_log import init_logging, log_start, print_warning, raise_easybuilderror -from easybuild.tools.config import CONT_IMAGE_FORMATS, CONT_TYPES, DEFAULT_CONT_TYPE -from easybuild.tools.config import DEFAULT_ALLOW_LOADED_MODULES, DEFAULT_FORCE_DOWNLOAD, DEFAULT_JOB_BACKEND -from easybuild.tools.config import DEFAULT_LOGFILE_FORMAT, DEFAULT_MAX_FAIL_RATIO_PERMS, DEFAULT_MNS -from easybuild.tools.config import DEFAULT_MODULE_SYNTAX, DEFAULT_MODULES_TOOL, DEFAULT_MODULECLASSES +from easybuild.tools.config import CONT_IMAGE_FORMATS, CONT_TYPES, DEFAULT_CONT_TYPE, DEFAULT_ALLOW_LOADED_MODULES +from easybuild.tools.config import DEFAULT_BRANCH, DEFAULT_FORCE_DOWNLOAD, DEFAULT_INDEX_MAX_AGE +from easybuild.tools.config import DEFAULT_JOB_BACKEND, DEFAULT_LOGFILE_FORMAT, DEFAULT_MAX_FAIL_RATIO_PERMS +from easybuild.tools.config import DEFAULT_MNS, DEFAULT_MODULE_SYNTAX, DEFAULT_MODULES_TOOL, DEFAULT_MODULECLASSES from easybuild.tools.config import DEFAULT_PATH_SUBDIRS, DEFAULT_PKG_RELEASE, DEFAULT_PKG_TOOL, DEFAULT_PKG_TYPE from easybuild.tools.config import DEFAULT_PNS, DEFAULT_PREFIX, DEFAULT_REPOSITORY, EBROOT_ENV_VAR_ACTIONS, ERROR from easybuild.tools.config import FORCE_DOWNLOAD_CHOICES, GENERAL_CLASS, IGNORE, JOB_DEPS_TYPE_ABORT_ON_ERROR @@ -79,7 +80,7 @@ from easybuild.tools.github import GITHUB_PR_DIRECTION_DESC, GITHUB_PR_ORDER_CREATED, GITHUB_PR_STATE_OPEN from easybuild.tools.github import GITHUB_PR_STATES, GITHUB_PR_ORDERS, GITHUB_PR_DIRECTIONS from easybuild.tools.github import HAVE_GITHUB_API, HAVE_KEYRING, VALID_CLOSE_PR_REASONS -from easybuild.tools.github import fetch_github_token +from easybuild.tools.github import fetch_easyblocks_from_pr, fetch_github_token from easybuild.tools.hooks import KNOWN_HOOKS from easybuild.tools.include import include_easyblocks, include_module_naming_schemes, include_toolchains from easybuild.tools.job.backend import avail_job_backends @@ -92,9 +93,10 @@ from easybuild.tools.run import run_cmd from easybuild.tools.package.utilities import avail_package_naming_schemes from easybuild.tools.toolchain.compiler import DEFAULT_OPT_LEVEL, OPTARCH_MAP_CHAR, OPTARCH_SEP, Compiler +from 
easybuild.tools.toolchain.toolchain import SYSTEM_TOOLCHAIN_NAME from easybuild.tools.repository.repository import avail_repositories -from easybuild.tools.systemtools import check_python_version, get_cpu_architecture, get_cpu_family, get_cpu_features -from easybuild.tools.systemtools import get_system_info +from easybuild.tools.systemtools import UNKNOWN, check_python_version, get_cpu_architecture, get_cpu_family +from easybuild.tools.systemtools import get_cpu_features, get_system_info from easybuild.tools.version import this_is_easybuild @@ -253,8 +255,13 @@ def basic_options(self): 'extended-dry-run-ignore-errors': ("Ignore errors that occur during dry run", None, 'store_true', True), 'force': ("Force to rebuild software even if it's already installed (i.e. if it can be found as module), " "and skipping check for OS dependencies", None, 'store_true', False, 'f'), + 'ignore-locks': ("Ignore locks that prevent two identical installations running in parallel", + None, 'store_true', False), 'job': ("Submit the build as a job", None, 'store_true', False), 'logtostdout': ("Redirect main log to stdout", None, 'store_true', False, 'l'), + 'locks-dir': ("Directory to store lock files (should be on a shared filesystem); " + "None implies .locks subdirectory of software installation directory", + None, 'store_or_None', None), 'missing-modules': ("Print list of missing modules for dependencies of specified easyconfigs", None, 'store_true', False, 'M'), 'only-blocks': ("Only build listed blocks", 'strlist', 'extend', None, 'b', {'metavar': 'BLOCKS'}), @@ -432,6 +439,8 @@ def override_options(self): None, 'store_true', False), 'verify-easyconfig-filenames': ("Verify whether filename of specified easyconfigs matches with contents", None, 'store_true', False), + 'wait-on-lock': ("Wait interval (in seconds) to use when waiting for existing lock to be removed " + "(0: implies no waiting, but exiting with an error)", int, 'store', 0), 'zip-logs': ("Zip logs that are copied to install directory, using specified command", None, 'store_or_None', 'gzip'), @@ -592,6 +601,8 @@ def github_options(self): 'git-working-dirs-path': ("Path to Git working directories for EasyBuild repositories", str, 'store', None), 'github-user': ("GitHub username", str, 'store', None), 'github-org': ("GitHub organization", str, 'store', None), + 'include-easyblocks-from-pr': ("Include easyblocks from specified PR", int, 'store', None, + {'metavar': 'PR#'}), 'install-github-token': ("Install GitHub token (requires --github-user)", None, 'store_true', False), 'close-pr': ("Close pull request", int, 'store', None, {'metavar': 'PR#'}), 'close-pr-msg': ("Custom close message for pull request closed with --close-pr; ", str, 'store', None), @@ -609,8 +620,9 @@ def github_options(self): 'pr-commit-msg': ("Commit message for new/updated pull request created with --new-pr", str, 'store', None), 'pr-descr': ("Description for new pull request created with --new-pr", str, 'store', None), 'pr-target-account': ("Target account for new PRs", str, 'store', GITHUB_EB_MAIN), - 'pr-target-branch': ("Target branch for new PRs", str, 'store', 'develop'), - 'pr-target-repo': ("Target repository for new/updating PRs", str, 'store', GITHUB_EASYCONFIGS_REPO), + 'pr-target-branch': ("Target branch for new PRs", str, 'store', DEFAULT_BRANCH), + 'pr-target-repo': ("Target repository for new/updating PRs (default: auto-detect based on provided files)", + str, 'store', None), 'pr-title': ("Title for new pull request created with --new-pr", str, 'store', None), 
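# Editorial sketch, not part of the patch: how the new --ignore-locks / --wait-on-lock options
# declared above are meant to behave; wait_for_lock() and its arguments are hypothetical, only
# the semantics come from the help strings (locks live in --locks-dir, by default a .locks
# subdirectory of the software installation directory).
import os
import time

def wait_for_lock(lock_path, wait_on_lock=0, ignore_locks=False):
    """Hypothetical helper: block until lock_path is gone, honouring the new lock options."""
    if ignore_locks:
        return  # --ignore-locks: proceed as if no lock exists
    while os.path.exists(lock_path):
        if wait_on_lock == 0:
            # --wait-on-lock=0 (the default): do not wait, exit with an error instead
            raise RuntimeError("Lock %s already exists, not waiting for it" % lock_path)
        time.sleep(wait_on_lock)  # poll every <wait-on-lock> seconds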
'preview-pr': ("Preview a new pull request", None, 'store_true', False), 'sync-branch-with-develop': ("Sync branch with current 'develop' branch", str, 'store', None), @@ -681,8 +693,12 @@ def easyconfig_options(self): descr = ("Options for Easyconfigs", "Options that affect all specified easyconfig files.") opts = OrderedDict({ + 'create-index': ("Create index for files in specified directory", None, 'store', None), 'fix-deprecated-easyconfigs': ("Fix use of deprecated functionality in specified easyconfig files.", None, 'store_true', False), + 'ignore-index': ("Ignore index when searching for files", None, 'store_true', False), + 'index-max-age': ("Maximum age for index before it is considered stale (in seconds)", + int, 'store', DEFAULT_INDEX_MAX_AGE), 'inject-checksums': ("Inject checksums of specified type for sources/patches into easyconfig file(s)", 'choice', 'store_or_None', CHECKSUM_TYPE_SHA256, CHECKSUM_TYPES), 'local-var-naming-check': ("Mode to use when checking whether local variables follow the recommended " @@ -738,8 +754,11 @@ def validate(self): for opt in ['software', 'try-software', 'toolchain', 'try-toolchain']: val = getattr(self.options, opt.replace('-', '_')) if val and len(val) != 2: - msg = "--%s requires NAME,VERSION (given %s)" % (opt, ','.join(val)) - error_msgs.append(msg) + if opt in ['toolchain', 'try-toolchain'] and val == [TC_CONSTANT_SYSTEM]: + setattr(self.options, opt.replace('-', '_'), [SYSTEM_TOOLCHAIN_NAME, SYSTEM_TOOLCHAIN_NAME]) + else: + msg = "--%s requires NAME,VERSION (given %s)" % (opt, ','.join(val)) + error_msgs.append(msg) if self.options.umask: umask_regex = re.compile('^[0-7]{3}$') @@ -922,7 +941,7 @@ def _postprocess_checks(self): """Check whether (combination of) configuration options make sense.""" # fail early if required dependencies for functionality requiring using GitHub API are not available: - if self.options.from_pr or self.options.upload_test_report: + if self.options.from_pr or self.options.include_easyblocks_from_pr or self.options.upload_test_report: if not HAVE_GITHUB_API: raise EasyBuildError("Required support for using GitHub API is not available (see warnings)") @@ -1040,8 +1059,8 @@ def _postprocess_list_avail(self): if self.options.avail_easyconfig_licenses: msg += avail_easyconfig_licenses(self.options.output_format) - # dump available easyblocks - if self.options.list_easyblocks: + # dump available easyblocks (unless including easyblocks from pr, in which case it will be done later) + if self.options.list_easyblocks and not self.options.include_easyblocks_from_pr: msg += list_easyblocks(self.options.list_easyblocks, self.options.output_format) # dump known toolchains @@ -1085,7 +1104,8 @@ def _postprocess_list_avail(self): print(msg) # cleanup tmpdir and exit - cleanup_and_exit(self.tmpdir) + if not self.options.include_easyblocks_from_pr: + cleanup_and_exit(self.tmpdir) def avail_repositories(self): """Show list of known repository types.""" @@ -1148,6 +1168,7 @@ def show_system_info(self): """Show system information.""" system_info = get_system_info() cpu_features = get_cpu_features() + cpu_arch_name = system_info['cpu_arch_name'] lines = [ "System information (%s):" % system_info['hostname'], '', @@ -1161,6 +1182,13 @@ def show_system_info(self): " -> vendor: %s" % system_info['cpu_vendor'], " -> architecture: %s" % get_cpu_architecture(), " -> family: %s" % get_cpu_family(), + ] + if cpu_arch_name == UNKNOWN: + lines.append(" -> arch name: UNKNOWN (archspec is not installed?)") + else: + lines.append(" -> arch 
name: %s" % cpu_arch_name) + + lines.extend([ " -> model: %s" % system_info['cpu_model'], " -> speed: %s" % system_info['cpu_speed'], " -> cores: %s" % system_info['core_count'], @@ -1170,7 +1198,8 @@ def show_system_info(self): " -> glibc version: %s" % system_info['glibc_version'], " -> Python binary: %s" % sys.executable, " -> Python version: %s" % sys.version.split(' ')[0], - ] + ]) + return '\n'.join(lines) def show_config(self): @@ -1393,6 +1422,29 @@ def set_up_configuration(args=None, logfile=None, testing=False, silent=False): init(options, config_options_dict) init_build_options(build_options=build_options, cmdline_options=options) + # done here instead of in _postprocess_include because github integration requires build_options to be initialized + if eb_go.options.include_easyblocks_from_pr: + easyblocks_from_pr = fetch_easyblocks_from_pr(eb_go.options.include_easyblocks_from_pr) + + if eb_go.options.include_easyblocks: + # make sure we're not including the same easyblock twice + included_from_pr = set([os.path.basename(eb) for eb in easyblocks_from_pr]) + included_from_file = set([os.path.basename(eb) for eb in eb_go.options.include_easyblocks]) + included_twice = included_from_pr & included_from_file + if included_twice: + raise EasyBuildError("Multiple inclusion of %s, check your --include-easyblocks options", + ','.join(included_twice)) + + include_easyblocks(eb_go.options.tmpdir, easyblocks_from_pr) + + if eb_go.options.list_easyblocks: + msg = list_easyblocks(eb_go.options.list_easyblocks, eb_go.options.output_format) + if eb_go.options.unittest_file: + log.info(msg) + else: + print(msg) + cleanup_and_exit(tmpdir) + check_python_version() # move directory containing fake vsc namespace into temporary directory used for this session diff --git a/easybuild/tools/parallelbuild.py b/easybuild/tools/parallelbuild.py index da824d66fd..8a7348fb18 100644 --- a/easybuild/tools/parallelbuild.py +++ b/easybuild/tools/parallelbuild.py @@ -68,7 +68,7 @@ def build_easyconfigs_in_parallel(build_command, easyconfigs, output_dir='easybu :param output_dir: output directory :param prepare_first: prepare by runnning fetch step first for each easyconfig """ - _log.info("going to build these easyconfigs in parallel: %s", easyconfigs) + _log.info("going to build these easyconfigs in parallel: %s", [os.path.basename(ec['spec']) for ec in easyconfigs]) active_job_backend = job_backend() if active_job_backend is None: @@ -94,7 +94,7 @@ def build_easyconfigs_in_parallel(build_command, easyconfigs, output_dir='easybu prepare_easyconfig(easyconfig) # the new job will only depend on already submitted jobs - _log.info("creating job for ec: %s" % easyconfig['ec']) + _log.info("creating job for ec: %s" % os.path.basename(easyconfig['spec'])) new_job = create_job(active_job_backend, build_command, easyconfig, output_dir=output_dir) # filter out dependencies marked as external modules diff --git a/easybuild/tools/repository/gitrepo.py b/easybuild/tools/repository/gitrepo.py index f34a95088e..d9f84d6700 100644 --- a/easybuild/tools/repository/gitrepo.py +++ b/easybuild/tools/repository/gitrepo.py @@ -44,7 +44,7 @@ from easybuild.base import fancylogger from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.filetools import rmtree2 +from easybuild.tools.filetools import remove_dir from easybuild.tools.repository.filerepo import FileRepository from easybuild.tools.utilities import only_if_module_is_available from easybuild.tools.version import VERSION @@ -188,6 +188,6 @@ def cleanup(self): 
""" try: self.wc = os.path.dirname(self.wc) - rmtree2(self.wc) + remove_dir(self.wc) except IOError as err: raise EasyBuildError("Can't remove working copy %s: %s", self.wc, err) diff --git a/easybuild/tools/repository/hgrepo.py b/easybuild/tools/repository/hgrepo.py index 000dd9b5b8..cb121f5cb2 100644 --- a/easybuild/tools/repository/hgrepo.py +++ b/easybuild/tools/repository/hgrepo.py @@ -44,7 +44,7 @@ from easybuild.base import fancylogger from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.filetools import rmtree2 +from easybuild.tools.filetools import remove_dir from easybuild.tools.repository.filerepo import FileRepository _log = fancylogger.getLogger('hgrepo', fname=False) @@ -192,6 +192,6 @@ def cleanup(self): Clean up mercurial working copy. """ try: - rmtree2(self.wc) + remove_dir(self.wc) except IOError as err: raise EasyBuildError("Can't remove working copy %s: %s", self.wc, err) diff --git a/easybuild/tools/repository/svnrepo.py b/easybuild/tools/repository/svnrepo.py index 6dc0f3c7b0..24dfcb8811 100644 --- a/easybuild/tools/repository/svnrepo.py +++ b/easybuild/tools/repository/svnrepo.py @@ -44,7 +44,7 @@ from easybuild.base import fancylogger from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.filetools import rmtree2 +from easybuild.tools.filetools import remove_dir from easybuild.tools.repository.filerepo import FileRepository from easybuild.tools.utilities import only_if_module_is_available @@ -190,6 +190,6 @@ def cleanup(self): Clean up SVN working copy. """ try: - rmtree2(self.wc) + remove_dir(self.wc) except OSError as err: raise EasyBuildError("Can't remove working copy %s: %s", self.wc, err) diff --git a/easybuild/tools/run.py b/easybuild/tools/run.py index 7e22e8c0ad..a3471abc04 100644 --- a/easybuild/tools/run.py +++ b/easybuild/tools/run.py @@ -175,6 +175,9 @@ def run_cmd(cmd, log_ok=True, log_all=False, simple=False, inp=None, regexp=True if trace: trace_txt = "running command:\n" trace_txt += "\t[started at: %s]\n" % start_time.strftime('%Y-%m-%d %H:%M:%S') + trace_txt += "\t[working dir: %s]\n" % (path or os.getcwd()) + if inp: + trace_txt += "\t[input: %s]\n" % inp trace_txt += "\t[output logged in %s]\n" % cmd_log_fn trace_msg(trace_txt + '\t' + cmd_msg) @@ -300,6 +303,7 @@ def run_cmd_qa(cmd, qa, no_qa=None, log_ok=True, log_all=False, simple=False, re if trace: trace_txt = "running interactive command:\n" trace_txt += "\t[started at: %s]\n" % start_time.strftime('%Y-%m-%d %H:%M:%S') + trace_txt += "\t[working dir: %s]\n" % (path or os.getcwd()) trace_txt += "\t[output logged in %s]\n" % cmd_log_fn trace_msg(trace_txt + '\t' + cmd.strip()) diff --git a/easybuild/tools/systemtools.py b/easybuild/tools/systemtools.py index 1b285e191d..13e28cc6f3 100644 --- a/easybuild/tools/systemtools.py +++ b/easybuild/tools/systemtools.py @@ -59,6 +59,14 @@ _log.debug("Failed to import 'distro' Python module: %s", err) HAVE_DISTRO = False +try: + from archspec.cpu import host as archspec_cpu_host + HAVE_ARCHSPEC = True +except ImportError as err: + _log.debug("Failed to import 'archspec' Python module: %s", err) + HAVE_ARCHSPEC = False + + # Architecture constants AARCH32 = 'AArch32' @@ -344,6 +352,22 @@ def get_cpu_family(): return family +def get_cpu_arch_name(): + """ + Determine CPU architecture name via archspec (if available). 
+ """ + cpu_arch_name = None + if HAVE_ARCHSPEC: + res = archspec_cpu_host() + if res: + cpu_arch_name = res.name + + if cpu_arch_name is None: + cpu_arch_name = UNKNOWN + + return cpu_arch_name + + def get_cpu_model(): """ Determine CPU model, e.g., Intel(R) Core(TM) i5-2540M CPU @ 2.60GHz @@ -563,6 +587,7 @@ def get_os_name(): os_name_map = { 'red hat enterprise linux server': 'RHEL', + 'red hat enterprise linux': 'RHEL', # RHEL8 has no server/client 'scientific linux sl': 'SL', 'scientific linux': 'SL', 'suse linux enterprise server': 'SLES', @@ -745,6 +770,7 @@ def get_system_info(): return { 'core_count': get_avail_core_count(), 'total_memory': get_total_memory(), + 'cpu_arch_name': get_cpu_arch_name(), 'cpu_model': get_cpu_model(), 'cpu_speed': get_cpu_speed(), 'cpu_vendor': get_cpu_vendor(), diff --git a/easybuild/tools/testing.py b/easybuild/tools/testing.py index 456e7c0db2..73c1ea92a1 100644 --- a/easybuild/tools/testing.py +++ b/easybuild/tools/testing.py @@ -50,7 +50,7 @@ from easybuild.tools.jenkins import aggregate_xml_in_dirs from easybuild.tools.parallelbuild import build_easyconfigs_in_parallel from easybuild.tools.robot import resolve_dependencies -from easybuild.tools.systemtools import get_system_info +from easybuild.tools.systemtools import UNKNOWN, get_system_info from easybuild.tools.version import FRAMEWORK_VERSION, EASYBLOCKS_VERSION @@ -140,7 +140,10 @@ def session_state(): def create_test_report(msg, ecs_with_res, init_session_state, pr_nr=None, gist_log=False): """Create test report for easyconfigs PR, in Markdown format.""" - user = build_option('github_user') + + github_user = build_option('github_user') + pr_target_account = build_option('pr_target_account') + pr_target_repo = build_option('pr_target_repo') end_time = gmtime() @@ -148,7 +151,7 @@ def create_test_report(msg, ecs_with_res, init_session_state, pr_nr=None, gist_l test_report = [] if pr_nr is not None: test_report.extend([ - "Test report for https://github.com/easybuilders/easybuild-easyconfigs/pull/%s" % pr_nr, + "Test report for https://github.com/%s/%s/pull/%s" % (pr_target_account, pr_target_repo, pr_nr), "", ]) test_report.extend([ @@ -182,7 +185,7 @@ def create_test_report(msg, ecs_with_res, init_session_state, pr_nr=None, gist_l if pr_nr is not None: descr += " (PR #%s)" % pr_nr fn = '%s_partial.log' % os.path.basename(ec['spec'])[:-3] - gist_url = create_gist(partial_log_txt, fn, descr=descr, github_user=user) + gist_url = create_gist(partial_log_txt, fn, descr=descr, github_user=github_user) test_log = "(partial log available at %s)" % gist_url build_overview.append(" * **%s** _%s_ %s" % (test_result, os.path.basename(ec['spec']), test_log)) @@ -239,15 +242,16 @@ def upload_test_report_as_gist(test_report, descr=None, fn=None): if fn is None: fn = 'easybuild_test_report_%s.md' % strftime("%Y%M%d-UTC-%H-%M-%S", gmtime()) - user = build_option('github_user') + github_user = build_option('github_user') + gist_url = create_gist(test_report, descr=descr, fn=fn, github_user=github_user) - gist_url = create_gist(test_report, descr=descr, fn=fn, github_user=user) return gist_url def post_easyconfigs_pr_test_report(pr_nr, test_report, msg, init_session_state, success): """Post test report in a gist, and submit comment in easyconfigs PR.""" - user = build_option('github_user') + + github_user = build_option('github_user') # create gist with test report descr = "EasyBuild test report for easyconfigs PR #%s" % pr_nr @@ -256,6 +260,11 @@ def post_easyconfigs_pr_test_report(pr_nr, test_report, msg, 
init_session_state, # post comment to report test result system_info = init_session_state['system_info'] + + # also mention CPU architecture name, but only if it's known + if system_info['cpu_arch_name'] != UNKNOWN: + system_info['cpu_model'] += " (%s)" % system_info['cpu_arch_name'] + short_system_info = "%(hostname)s - %(os_type)s %(os_name)s %(os_version)s, %(cpu_model)s, Python %(pyver)s" % { 'cpu_model': system_info['cpu_model'], 'hostname': system_info['hostname'], @@ -264,15 +273,20 @@ def post_easyconfigs_pr_test_report(pr_nr, test_report, msg, init_session_state, 'os_version': system_info['os_version'], 'pyver': system_info['python_version'].split(' ')[0], } + comment_lines = [ - "Test report by @%s" % user, + "Test report by @%s" % github_user, ('**FAILED**', '**SUCCESS**')[success], msg, short_system_info, "See %s for a full test report." % gist_url, ] comment = '\n'.join(comment_lines) - post_comment_in_issue(pr_nr, comment, github_user=user) + + pr_target_account = build_option('pr_target_account') + pr_target_repo = build_option('pr_target_repo') + + post_comment_in_issue(pr_nr, comment, account=pr_target_account, repo=pr_target_repo, github_user=github_user) msg = "Test report uploaded to %s and mentioned in a comment in easyconfigs PR#%s" % (gist_url, pr_nr) return msg diff --git a/easybuild/tools/toolchain/mpi.py b/easybuild/tools/toolchain/mpi.py index 9a30baa33f..052c3e061d 100644 --- a/easybuild/tools/toolchain/mpi.py +++ b/easybuild/tools/toolchain/mpi.py @@ -166,6 +166,22 @@ def mpi_family(self): else: raise EasyBuildError("mpi_family: MPI_FAMILY is undefined.") + def mpi_cmd_prefix(self, nr_ranks=1): + """Construct an MPI command prefix to precede an executable""" + + # Verify that the command appears at the end of mpi_cmd_for + test_cmd = 'xxx_command_xxx' + mpi_cmd = self.mpi_cmd_for(test_cmd, nr_ranks) + if mpi_cmd.rstrip().endswith(test_cmd): + result = mpi_cmd.replace(test_cmd, '').rstrip() + else: + warning_msg = "mpi_cmd_for cannot be used by mpi_cmd_prefix, " + warning_msg += "requires that %(cmd)s template appears at the end" + self.log.warning(warning_msg) + result = None + + return result + def mpi_cmd_for(self, cmd, nr_ranks): """Construct an MPI command for the given command and number of ranks.""" @@ -180,10 +196,10 @@ def mpi_cmd_for(self, cmd, nr_ranks): self.log.info("Using specified template for MPI commands: %s", mpi_cmd_template) else: # different known mpirun commands - mpirun_n_cmd = "mpirun -n %(nr_ranks)d %(cmd)s" + mpirun_n_cmd = "mpirun -n %(nr_ranks)s %(cmd)s" mpi_cmds = { toolchain.OPENMPI: mpirun_n_cmd, - toolchain.QLOGICMPI: "mpirun -H localhost -np %(nr_ranks)d %(cmd)s", + toolchain.QLOGICMPI: "mpirun -H localhost -np %(nr_ranks)s %(cmd)s", toolchain.INTELMPI: mpirun_n_cmd, toolchain.MVAPICH2: mpirun_n_cmd, toolchain.MPICH: mpirun_n_cmd, @@ -201,7 +217,7 @@ def mpi_cmd_for(self, cmd, nr_ranks): impi_ver = self.get_software_version(self.MPI_MODULE_NAME)[0] if LooseVersion(impi_ver) <= LooseVersion('4.1'): - mpi_cmds[toolchain.INTELMPI] = "mpirun %(mpdbf)s %(nodesfile)s -np %(nr_ranks)d %(cmd)s" + mpi_cmds[toolchain.INTELMPI] = "mpirun %(mpdbf)s %(nodesfile)s -np %(nr_ranks)s %(cmd)s" # set temporary dir for MPD # note: this needs to be kept *short*, @@ -230,7 +246,7 @@ def mpi_cmd_for(self, cmd, nr_ranks): # create nodes file nodes = os.path.join(tmpdir, 'nodes') - write_file(nodes, "localhost\n" * nr_ranks) + write_file(nodes, "localhost\n" * int(nr_ranks)) params.update({'nodesfile': "-machinefile %s" % nodes}) @@ -240,9 +256,19 @@ def 
mpi_cmd_for(self, cmd, nr_ranks): else: raise EasyBuildError("Don't know which template MPI command to use for MPI family '%s'", mpi_family) + missing = [] + for key in sorted(params.keys()): + tmpl = '%(' + key + ')s' + if tmpl not in mpi_cmd_template: + missing.append(tmpl) + if missing: + raise EasyBuildError("Missing templates in mpi-cmd-template value '%s': %s", + mpi_cmd_template, ', '.join(missing)) + try: res = mpi_cmd_template % params except KeyError as err: - raise EasyBuildError("Failed to complete MPI cmd template '%s' with %s: %s", mpi_cmd_template, params, err) + raise EasyBuildError("Failed to complete MPI cmd template '%s' with %s: KeyError %s", + mpi_cmd_template, params, err) return res diff --git a/easybuild/tools/version.py b/easybuild/tools/version.py index ca7d7a8e65..ae1710966d 100644 --- a/easybuild/tools/version.py +++ b/easybuild/tools/version.py @@ -43,7 +43,7 @@ # recent setuptools versions will *TRANSFORM* something like 'X.Y.Zdev' into 'X.Y.Z.dev0', with a warning like # UserWarning: Normalizing '2.4.0dev' to '2.4.0.dev0' # This causes problems further up the dependency chain... -VERSION = LooseVersion('4.1.1') +VERSION = LooseVersion('4.2.0.dev0') UNKNOWN = 'UNKNOWN' @@ -55,18 +55,18 @@ def get_git_revision(): relies on GitPython (see http://gitorious.org/git-python) """ try: - import git + from git import Git, GitCommandError except ImportError: return UNKNOWN try: path = os.path.dirname(__file__) - gitrepo = git.Git(path) + gitrepo = Git(path) res = gitrepo.rev_list('HEAD').splitlines()[0] # 'encode' may be required to make sure a regular string is returned rather than a unicode string # (only needed in Python 2; in Python 3, regular strings are already unicode) if not isinstance(res, str): res = res.encode('ascii') - except git.GitCommandError: + except GitCommandError: res = UNKNOWN return res diff --git a/requirements.txt b/requirements.txt index b7aa408d58..0a8591c50f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,6 +5,10 @@ keyring==5.7.1; python_version < '2.7' keyring<=9.1; python_version >= '2.7' keyrings.alt; python_version >= '2.7' +# GitDB 4.0.1 no longer supports Python 2.6 +gitdb==0.6.4; python_version < '2.7' +gitdb; python_version >= '2.7' + # GitPython 2.1.9 no longer supports Python 2.6 GitPython==2.1.8; python_version < '2.7' GitPython; python_version >= '2.7' @@ -46,3 +50,5 @@ GC3Pie python-graph-dot python-hglib requests + +archspec; python_version >= '2.7' diff --git a/setup.cfg b/setup.cfg index e4bfd7cb81..430d761b59 100644 --- a/setup.cfg +++ b/setup.cfg @@ -19,4 +19,4 @@ builtins = # ignore "Black would make changes" produced by flake8-black # see also https://github.com/houndci/hound/issues/1769 -ignore = BLK100 +extend-ignore = BLK100 diff --git a/test/framework/build_log.py b/test/framework/build_log.py index 7af9e623f1..0c9ec6894b 100644 --- a/test/framework/build_log.py +++ b/test/framework/build_log.py @@ -237,11 +237,11 @@ def test_log_levels(self): def test_print_warning(self): """Test print_warning""" - def run_check(args, silent=False, expected_stderr=''): + def run_check(args, silent=False, expected_stderr='', **kwargs): """Helper function to check stdout/stderr produced via print_warning.""" self.mock_stderr(True) self.mock_stdout(True) - print_warning(*args, silent=silent) + print_warning(*args, silent=silent, **kwargs) stderr = self.get_stderr() stdout = self.get_stdout() self.mock_stdout(False) @@ -258,6 +258,14 @@ def run_check(args, silent=False, expected_stderr=''): 
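# Editorial sketch, not part of the patch: what the new get_cpu_arch_name() in systemtools.py
# boils down to, using the optional 'archspec' dependency added to requirements.txt above;
# archspec.cpu.host() and its .name attribute are the same API used in the patch, the function
# name below is made up.
try:
    from archspec.cpu import host as archspec_cpu_host
except ImportError:
    archspec_cpu_host = None

UNKNOWN = 'UNKNOWN'

def sketch_cpu_arch_name():
    """Return CPU microarchitecture name (e.g. 'haswell') via archspec, or UNKNOWN without it."""
    if archspec_cpu_host is not None:
        res = archspec_cpu_host()
        if res:
            return res.name
    return UNKNOWN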
self.assertErrorRegex(EasyBuildError, "Unknown named arguments", print_warning, 'foo', unknown_arg='bar') + # test passing of logger to print_warning + tmp_logfile = os.path.join(self.test_prefix, 'test.log') + logger, _ = init_logging(tmp_logfile, silent=True) + expected = "\nWARNING: Test log message with a logger involved.\n\n" + run_check(["Test log message with a logger involved."], expected_stderr=expected, log=logger) + log_txt = read_file(tmp_logfile) + self.assertTrue("WARNING Test log message with a logger involved." in log_txt) + def test_print_error(self): """Test print_error""" def run_check(args, silent=False, expected_stderr=''): diff --git a/test/framework/easyblock.py b/test/framework/easyblock.py index 00033c106a..057bcfb56e 100644 --- a/test/framework/easyblock.py +++ b/test/framework/easyblock.py @@ -51,7 +51,7 @@ from easybuild.tools.modules import reset_module_caches from easybuild.tools.utilities import time2str from easybuild.tools.version import get_git_revision, this_is_easybuild - +from easybuild.tools.py2vs3 import string_type class EasyBlockTest(EnhancedTestCase): """ Baseclass for easyblock testcases """ @@ -318,11 +318,10 @@ def test_make_module_req(self): os.makedirs(eb.installdir) open(os.path.join(eb.installdir, 'foo.jar'), 'w').write('foo.jar') open(os.path.join(eb.installdir, 'bla.jar'), 'w').write('bla.jar') - os.mkdir(os.path.join(eb.installdir, 'bin')) - os.mkdir(os.path.join(eb.installdir, 'bin', 'testdir')) - os.mkdir(os.path.join(eb.installdir, 'sbin')) - os.mkdir(os.path.join(eb.installdir, 'share')) - os.mkdir(os.path.join(eb.installdir, 'share', 'man')) + for path in ('bin', ('bin', 'testdir'), 'sbin', 'share', ('share', 'man'), 'lib', 'lib64'): + if isinstance(path, string_type): + path = (path, ) + os.mkdir(os.path.join(eb.installdir, *path)) # this is not a path that should be picked up os.mkdir(os.path.join(eb.installdir, 'CPATH')) @@ -332,6 +331,7 @@ def test_make_module_req(self): self.assertTrue(re.search(r"^prepend-path\s+CLASSPATH\s+\$root/bla.jar$", guess, re.M)) self.assertTrue(re.search(r"^prepend-path\s+CLASSPATH\s+\$root/foo.jar$", guess, re.M)) self.assertTrue(re.search(r"^prepend-path\s+MANPATH\s+\$root/share/man$", guess, re.M)) + self.assertTrue(re.search(r"^prepend-path\s+CMAKE_PREFIX_PATH\s+\$root$", guess, re.M)) # bin/ is not added to $PATH if it doesn't include files self.assertFalse(re.search(r"^prepend-path\s+PATH\s+\$root/bin$", guess, re.M)) self.assertFalse(re.search(r"^prepend-path\s+PATH\s+\$root/sbin$", guess, re.M)) @@ -341,6 +341,7 @@ def test_make_module_req(self): self.assertTrue(re.search(r'^prepend_path\("CLASSPATH", pathJoin\(root, "bla.jar"\)\)$', guess, re.M)) self.assertTrue(re.search(r'^prepend_path\("CLASSPATH", pathJoin\(root, "foo.jar"\)\)$', guess, re.M)) self.assertTrue(re.search(r'^prepend_path\("MANPATH", pathJoin\(root, "share/man"\)\)$', guess, re.M)) + self.assertTrue('prepend_path("CMAKE_PREFIX_PATH", root)' in guess) # bin/ is not added to $PATH if it doesn't include files self.assertFalse(re.search(r'^prepend_path\("PATH", pathJoin\(root, "bin"\)\)$', guess, re.M)) self.assertFalse(re.search(r'^prepend_path\("PATH", pathJoin\(root, "sbin"\)\)$', guess, re.M)) @@ -361,6 +362,41 @@ def test_make_module_req(self): else: self.assertTrue(False, "Unknown module syntax: %s" % get_module_syntax()) + # Check that lib64 is only added to CMAKE_LIBRARY_PATH if there are files in there + # but only if it is not a symlink to lib + # -- No Files + if get_module_syntax() == 'Tcl': + 
self.assertFalse(re.search(r"^prepend-path\s+CMAKE_LIBRARY_PATH\s+\$root/lib64$", guess, re.M)) + elif get_module_syntax() == 'Lua': + self.assertFalse('prepend_path("CMAKE_LIBRARY_PATH", pathJoin(root, "lib64"))' in guess) + # -- With files + open(os.path.join(eb.installdir, 'lib64', 'libfoo.so'), 'w').write('test') + guess = eb.make_module_req() + if get_module_syntax() == 'Tcl': + self.assertTrue(re.search(r"^prepend-path\s+CMAKE_LIBRARY_PATH\s+\$root/lib64$", guess, re.M)) + elif get_module_syntax() == 'Lua': + self.assertTrue('prepend_path("CMAKE_LIBRARY_PATH", pathJoin(root, "lib64"))' in guess) + # -- With files in lib and lib64 symlinks to lib + open(os.path.join(eb.installdir, 'lib', 'libfoo.so'), 'w').write('test') + shutil.rmtree(os.path.join(eb.installdir, 'lib64')) + os.symlink('lib', os.path.join(eb.installdir, 'lib64')) + guess = eb.make_module_req() + if get_module_syntax() == 'Tcl': + self.assertFalse(re.search(r"^prepend-path\s+CMAKE_LIBRARY_PATH\s+\$root/lib64$", guess, re.M)) + elif get_module_syntax() == 'Lua': + self.assertFalse('prepend_path("CMAKE_LIBRARY_PATH", pathJoin(root, "lib64"))' in guess) + + # With files in /lib and /lib64 symlinked to /lib there should be exactly 1 entry for (LD_)LIBRARY_PATH + # pointing to /lib + for var in ('LIBRARY_PATH', 'LD_LIBRARY_PATH'): + if get_module_syntax() == 'Tcl': + self.assertFalse(re.search(r"^prepend-path\s+%s\s+\$root/lib64$" % var, guess, re.M)) + self.assertEqual(len(re.findall(r"^prepend-path\s+%s\s+\$root/lib$" % var, guess, re.M)), 1) + elif get_module_syntax() == 'Lua': + self.assertFalse(re.search(r'^prepend_path\("%s", pathJoin\(root, "lib64"\)\)$' % var, guess, re.M)) + self.assertEqual(len(re.findall(r'^prepend_path\("%s", pathJoin\(root, "lib"\)\)$' % var, + guess, re.M)), 1) + # check for behavior when a string value is used as dict value by make_module_req_guesses eb.make_module_req_guess = lambda: {'PATH': 'bin'} txt = eb.make_module_req() @@ -786,7 +822,6 @@ def test_extensions_step(self): def test_skip_extensions_step(self): """Test the skip_extensions_step""" - init_config(build_options={'silent': True}) self.contents = cleandoc(""" easyblock = "ConfigureMake" @@ -797,11 +832,12 @@ def test_skip_extensions_step(self): toolchain = SYSTEM exts_list = [ "ext1", - ("ext2", "42", {"source_tmpl": "dummy.tgz"}), + ("EXT-2", "42", {"source_tmpl": "dummy.tgz"}), ("ext3", "1.1", {"source_tmpl": "dummy.tgz", "modulename": "real_ext"}), + "ext4", ] exts_filter = ("\ - if [ %(ext_name)s == 'ext2' ] && [ %(ext_version)s == '42' ] && [[ %(src)s == *dummy.tgz ]];\ + if [ %(ext_name)s == 'ext_2' ] && [ %(ext_version)s == '42' ] && [[ %(src)s == *dummy.tgz ]];\ then exit 0;\ elif [ %(ext_name)s == 'real_ext' ]; then exit 0;\ else exit 1; fi", "") @@ -813,12 +849,30 @@ def test_skip_extensions_step(self): eb.builddir = config.build_path() eb.installdir = config.install_path() eb.skip = True + + self.mock_stdout(True) eb.extensions_step(fetch=True) - # 'ext1' should be in eb.exts - eb_exts = [y for x in eb.exts for y in x.values()] + stdout = self.get_stdout() + self.mock_stdout(False) + + patterns = [ + r"^== skipping extension EXT-2", + r"^== skipping extension ext3", + r"^== installing extension ext1 \(1/2\)\.\.\.", + r"^== installing extension ext4 \(2/2\)\.\.\.", + ] + for pattern in patterns: + regex = re.compile(pattern, re.M) + self.assertTrue(regex.search(stdout), "Pattern '%s' found in: %s" % (regex.pattern, stdout)) + + # 'ext1' should be in eb.ext_instances + eb_exts = [x.name for x in eb.ext_instances] 
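# Editorial sketch, not part of the patch: the trick behind the new mpi_cmd_prefix() in
# easybuild/tools/toolchain/mpi.py above, shown standalone; the default OpenMPI template
# "mpirun -n %(nr_ranks)s %(cmd)s" is taken from the patch, the function name here is made up.
def sketch_mpi_cmd_prefix(mpi_cmd_template="mpirun -n %(nr_ranks)s %(cmd)s", nr_ranks=1):
    """Derive the MPI launch prefix by formatting a dummy command and stripping it off again."""
    test_cmd = 'xxx_command_xxx'
    mpi_cmd = mpi_cmd_template % {'nr_ranks': nr_ranks, 'cmd': test_cmd}
    if mpi_cmd.rstrip().endswith(test_cmd):
        return mpi_cmd.replace(test_cmd, '').rstrip()
    # template does not end with %(cmd)s, so no usable prefix can be derived
    return None

assert sketch_mpi_cmd_prefix() == 'mpirun -n 1'
# which matches what the new %(mpi_cmd_prefix)s easyconfig template resolves to
# in the templating test further down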
self.assertTrue('ext1' in eb_exts) - # 'ext2' should not - self.assertFalse('ext2' in eb_exts) + # 'EXT-2' should not + self.assertFalse('EXT-2' in eb_exts) + self.assertFalse('EXT_2' in eb_exts) + self.assertFalse('ext-2' in eb_exts) + self.assertFalse('ext_2' in eb_exts) # 'ext3' should not self.assertFalse('ext3' in eb_exts) diff --git a/test/framework/easyconfig.py b/test/framework/easyconfig.py index f956900f59..3b720f687c 100644 --- a/test/framework/easyconfig.py +++ b/test/framework/easyconfig.py @@ -464,7 +464,8 @@ def test_extensions_templates(self): ' "source_tmpl": "%(name)s-%(version_major_minor)s-py%(pymajver)s%(versionsuffix)s.tar.gz",', ' "patches": ["%(name)s-%(version)s_fix-silly-typo-in-printf-statement.patch"],', # use hacky prebuildopts that is picked up by 'EB_Toy' easyblock, to check whether templates are resolved - ' "prebuildopts": "gcc -O2 %(name)s.c -o toy-%(version)s && mv toy-%(version)s toy #",', + ' "prebuildopts": "gcc -O2 %(name)s.c -o toy-%(version)s &&' + + ' mv toy-%(version)s toy # echo installdir is %(installdir)s #",', ' }),', ']', ]) @@ -489,9 +490,12 @@ def test_extensions_templates(self): for patch in toy_ext.patches: patches.append(patch['path']) self.assertEqual(patches, [os.path.join(self.test_prefix, toy_patch_fn)]) + # define actual installation dir + pi_installdir = os.path.join(self.test_installpath, 'software', 'pi', '3.14-test') + expected_prebuildopts = 'gcc -O2 toy.c -o toy-0.0 && mv toy-0.0 toy # echo installdir is %s #' % pi_installdir expected = { 'patches': ['toy-0.0_fix-silly-typo-in-printf-statement.patch'], - 'prebuildopts': 'gcc -O2 toy.c -o toy-0.0 && mv toy-0.0 toy #', + 'prebuildopts': expected_prebuildopts, 'source_tmpl': 'toy-0.0-py3-test.tar.gz', 'source_urls': ['https://pypi.python.org/packages/source/t/toy'], } @@ -500,10 +504,9 @@ def test_extensions_templates(self): # also .cfg of Extension instance was updated correctly self.assertEqual(toy_ext.cfg['source_urls'], ['https://pypi.python.org/packages/source/t/toy']) self.assertEqual(toy_ext.cfg['patches'], [toy_patch_fn]) - self.assertEqual(toy_ext.cfg['prebuildopts'], "gcc -O2 toy.c -o toy-0.0 && mv toy-0.0 toy #") + self.assertEqual(toy_ext.cfg['prebuildopts'], expected_prebuildopts) # check whether files expected to be installed for 'toy' extension are in place - pi_installdir = os.path.join(self.test_installpath, 'software', 'pi', '3.14-test') self.assertTrue(os.path.exists(os.path.join(pi_installdir, 'bin', 'toy'))) self.assertTrue(os.path.exists(os.path.join(pi_installdir, 'lib', 'libtoy.a'))) @@ -1005,6 +1008,19 @@ def test_templating(self): eb['description'] = "test easyconfig % %% %s% %%% %(name)s %%(name)s %%%(name)s %%%%(name)s" self.assertEqual(eb['description'], "test easyconfig % %% %s% %%% PI %(name)s %PI %%(name)s") + # test use of %(mpi_cmd_prefix)s template + test_ecs_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'easyconfigs', 'test_ecs') + gompi_ec = os.path.join(test_ecs_dir, 't', 'toy', 'toy-0.0-gompi-2018a.eb') + test_ec = os.path.join(self.test_prefix, 'test.eb') + write_file(test_ec, read_file(gompi_ec) + "\nsanity_check_commands = ['%(mpi_cmd_prefix)s toy']") + + ec = EasyConfig(test_ec) + self.assertEqual(ec['sanity_check_commands'], ['mpirun -n 1 toy']) + + init_config(build_options={'mpi_cmd_template': "mpiexec -np %(nr_ranks)s -- %(cmd)s "}) + ec = EasyConfig(test_ec) + self.assertEqual(ec['sanity_check_commands'], ['mpiexec -np 1 -- toy']) + def test_templating_doc(self): """test templating documentation""" doc = 
avail_easyconfig_templates() @@ -1515,21 +1531,26 @@ def test_update(self): ec.update('description', "- just a test") self.assertEqual(ec['description'].strip(), "Toy C program, 100% toy. - just a test") - # spaces in between multiple updates for stirng values + # spaces in between multiple updates for string values ec.update('configopts', 'CC="$CC"') ec.update('configopts', 'CXX="$CXX"') self.assertTrue(ec['configopts'].strip().endswith('CC="$CC" CXX="$CXX"')) + # spaces in between multiple updates for string values from list + ec.update('configopts', ['MORE_VALUE', 'EVEN_MORE']) + self.assertTrue(ec['configopts'].strip().endswith('MORE_VALUE EVEN_MORE')) # for list values: extend ec.update('patches', ['foo.patch', 'bar.patch']) toy_patch_fn = 'toy-0.0_fix-silly-typo-in-printf-statement.patch' self.assertEqual(ec['patches'], [toy_patch_fn, ('toy-extra.txt', 'toy-0.0'), 'foo.patch', 'bar.patch']) - # for unallowed duplicates + # for unallowed duplicates on string values ec.update('configopts', 'SOME_VALUE') configopts_tmp = ec['configopts'] ec.update('configopts', 'SOME_VALUE', allow_duplicate=False) self.assertEqual(ec['configopts'], configopts_tmp) + ec.update('configopts', ['CC="$CC"', 'SOME_VALUE'], allow_duplicate=False) + self.assertEqual(ec['configopts'], configopts_tmp) # for unallowed duplicates when a list is used ec.update('patches', ['foo2.patch', 'bar2.patch']) @@ -2713,17 +2734,26 @@ def test_hidden_toolchain(self): def test_categorize_files_by_type(self): """Test categorize_files_by_type""" - self.assertEqual({'easyconfigs': [], 'files_to_delete': [], 'patch_files': []}, categorize_files_by_type([])) + self.assertEqual({'easyconfigs': [], 'files_to_delete': [], 'patch_files': [], 'py_files': []}, + categorize_files_by_type([])) - test_ecs_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs',) + test_dir = os.path.dirname(os.path.abspath(__file__)) + test_ecs_dir = os.path.join(test_dir, 'easyconfigs') toy_patch_fn = 'toy-0.0_fix-silly-typo-in-printf-statement.patch' toy_patch = os.path.join(os.path.dirname(test_ecs_dir), 'sandbox', 'sources', 'toy', toy_patch_fn) + + easyblocks_dir = os.path.join(test_dir, 'sandbox', 'easybuild', 'easyblocks') + configuremake = os.path.join(easyblocks_dir, 'generic', 'configuremake.py') + toy_easyblock = os.path.join(easyblocks_dir, 't', 'toy.py') + paths = [ 'bzip2-1.0.6.eb', + toy_easyblock, os.path.join(test_ecs_dir, 'test_ecs', 'g', 'gzip', 'gzip-1.4.eb'), toy_patch, 'foo', ':toy-0.0-deps.eb', + configuremake, ] res = categorize_files_by_type(paths) expected = [ @@ -2734,6 +2764,7 @@ def test_categorize_files_by_type(self): self.assertEqual(res['easyconfigs'], expected) self.assertEqual(res['files_to_delete'], ['toy-0.0-deps.eb']) self.assertEqual(res['patch_files'], [toy_patch]) + self.assertEqual(res['py_files'], [toy_easyblock, configuremake]) def test_resolve_template(self): """Test resolve_template function.""" @@ -2968,12 +2999,19 @@ def test_get_paths_for(self): def test_is_generic_easyblock(self): """Test for is_generic_easyblock function.""" + # is_generic_easyblock in easyconfig.py is deprecated, moved to filetools.py + self.allow_deprecated_behaviour() + + self.mock_stderr(True) + for name in ['Binary', 'ConfigureMake', 'CMakeMake', 'PythonPackage', 'JAR']: self.assertTrue(is_generic_easyblock(name)) for name in ['EB_bzip2', 'EB_DL_underscore_POLY_underscore_Classic', 'EB_GCC', 'EB_WRF_minus_Fire']: self.assertFalse(is_generic_easyblock(name)) + self.mock_stderr(False) + def test_get_module_path(self): 
"""Test get_module_path function.""" self.assertEqual(get_module_path('EB_bzip2', generic=False), 'easybuild.easyblocks.bzip2') diff --git a/test/framework/filetools.py b/test/framework/filetools.py index 640176ee33..f03d126e8f 100644 --- a/test/framework/filetools.py +++ b/test/framework/filetools.py @@ -38,6 +38,7 @@ import stat import sys import tempfile +import time from test.framework.utilities import EnhancedTestCase, TestLoaderFiltered, init_config from unittest import TextTestRunner @@ -268,7 +269,7 @@ def test_checksums(self): # checksum of length 32 is assumed to be MD5, length 64 to be SHA256, other lengths not allowed # checksum of length other than 32/64 yields an error - error_pattern = "Length of checksum '.*' \(\d+\) does not match with either MD5 \(32\) or SHA256 \(64\)" + error_pattern = r"Length of checksum '.*' \(\d+\) does not match with either MD5 \(32\) or SHA256 \(64\)" for checksum in ['tooshort', 'inbetween32and64charactersisnotgoodeither', known_checksums['sha256'] + 'foo']: self.assertErrorRegex(EasyBuildError, error_pattern, ft.verify_checksum, fp, checksum) @@ -584,7 +585,7 @@ def test_read_write_file(self): txt2 = '\n'.join(['test', '123']) ft.write_file(fp, txt2, append=True) - self.assertEqual(ft.read_file(fp), txt+txt2) + self.assertEqual(ft.read_file(fp), txt + txt2) # test backing up of existing file ft.write_file(fp, 'foo', backup=True) @@ -656,6 +657,16 @@ def test_read_write_file(self): # test use of 'mode' in read_file self.assertEqual(ft.read_file(foo, mode='rb'), b'bar') + def test_is_binary(self): + """Test is_binary function.""" + + for test in ['foo', '', b'foo', b'', "This is just a test", b"This is just a test", b"\xa0"]: + self.assertFalse(ft.is_binary(test)) + + self.assertTrue(ft.is_binary(b'\00')) + self.assertTrue(ft.is_binary(b"File is binary when it includes \00 somewhere")) + self.assertTrue(ft.is_binary(ft.read_file('/bin/ls', mode='rb'))) + def test_det_patched_files(self): """Test det_patched_files function.""" toy_patch_fn = 'toy-0.0_fix-silly-typo-in-printf-statement.patch' @@ -663,6 +674,18 @@ def test_det_patched_files(self): self.assertEqual(ft.det_patched_files(pf), ['b/toy-0.0/toy.source']) self.assertEqual(ft.det_patched_files(pf, omit_ab_prefix=True), ['toy-0.0/toy.source']) + # create a patch file with a non-UTF8 character in it, should not result in problems + # (see https://github.com/easybuilders/easybuild-framework/issues/3190) + test_patch = os.path.join(self.test_prefix, 'test.patch') + patch_txt = b'\n'.join([ + b"--- foo", + b"+++ foo", + b"- test line", + b"+ test line with non-UTF8 char: '\xa0'", + ]) + ft.write_file(test_patch, patch_txt) + self.assertEqual(ft.det_patched_files(test_patch), ['foo']) + def test_guess_patch_level(self): "Test guess_patch_level.""" # create dummy toy.source file so guess_patch_level can work @@ -1652,6 +1675,129 @@ def test_remove(self): ft.adjust_permissions(self.test_prefix, stat.S_IWUSR, add=True) + def test_index_functions(self): + """Test *_index functions.""" + + test_ecs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs') + + # create_index checks whether specified path is an existing directory + doesnotexist = os.path.join(self.test_prefix, 'doesnotexist') + self.assertErrorRegex(EasyBuildError, "Specified path does not exist", ft.create_index, doesnotexist) + + toy_ec = os.path.join(test_ecs, 't', 'toy', 'toy-0.0.eb') + self.assertErrorRegex(EasyBuildError, "Specified path is not a directory", ft.create_index, toy_ec) + + # load_index just 
returns None if there is no index in specified directory + self.assertEqual(ft.load_index(self.test_prefix), None) + + # create index for test easyconfigs; + # test with specified path with and without trailing '/'s + for path in [test_ecs, test_ecs + '/', test_ecs + '//']: + index = ft.create_index(path) + self.assertEqual(len(index), 79) + + expected = [ + os.path.join('b', 'bzip2', 'bzip2-1.0.6-GCC-4.9.2.eb'), + os.path.join('t', 'toy', 'toy-0.0.eb'), + os.path.join('s', 'ScaLAPACK', 'ScaLAPACK-2.0.2-gompi-2018a-OpenBLAS-0.2.20.eb'), + ] + for fn in expected: + self.assertTrue(fn in index) + + for fp in index: + self.assertTrue(fp.endswith('.eb')) + + # set up some files to create actual index file for + ft.copy_dir(os.path.join(test_ecs, 'g'), os.path.join(self.test_prefix, 'g')) + + # test dump_index function + index_fp = ft.dump_index(self.test_prefix) + self.assertTrue(os.path.exists(index_fp)) + self.assertTrue(os.path.samefile(self.test_prefix, os.path.dirname(index_fp))) + + datestamp_pattern = r"[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]+" + expected_header = [ + "# created at: " + datestamp_pattern, + "# valid until: " + datestamp_pattern, + ] + expected = [ + os.path.join('g', 'gzip', 'gzip-1.4.eb'), + os.path.join('g', 'GCC', 'GCC-7.3.0-2.30.eb'), + os.path.join('g', 'gompic', 'gompic-2018a.eb'), + ] + index_txt = ft.read_file(index_fp) + for fn in expected_header + expected: + regex = re.compile('^%s$' % fn, re.M) + self.assertTrue(regex.search(index_txt), "Pattern '%s' found in: %s" % (regex.pattern, index_txt)) + + # test load_index function + self.mock_stderr(True) + self.mock_stdout(True) + index = ft.load_index(self.test_prefix) + stderr = self.get_stderr() + stdout = self.get_stdout() + self.mock_stderr(False) + self.mock_stdout(False) + + self.assertFalse(stderr) + regex = re.compile(r"^== found valid index for %s, so using it\.\.\.$" % self.test_prefix) + self.assertTrue(regex.match(stdout.strip()), "Pattern '%s' matches with: %s" % (regex.pattern, stdout)) + + self.assertEqual(len(index), 24) + for fn in expected: + self.assertTrue(fn in index, "%s should be found in %s" % (fn, sorted(index))) + + # dump_index will not overwrite existing index without force + error_pattern = "File exists, not overwriting it without --force" + self.assertErrorRegex(EasyBuildError, error_pattern, ft.dump_index, self.test_prefix) + + ft.remove_file(index_fp) + + # test creating index file that's infinitely valid + index_fp = ft.dump_index(self.test_prefix, max_age_sec=0) + index_txt = ft.read_file(index_fp) + expected_header[1] = r"# valid until: 9999-12-31 23:59:59\.9+" + for fn in expected_header + expected: + regex = re.compile('^%s$' % fn, re.M) + self.assertTrue(regex.search(index_txt), "Pattern '%s' found in: %s" % (regex.pattern, index_txt)) + + self.mock_stderr(True) + self.mock_stdout(True) + index = ft.load_index(self.test_prefix) + stderr = self.get_stderr() + stdout = self.get_stdout() + self.mock_stderr(False) + self.mock_stdout(False) + + self.assertFalse(stderr) + regex = re.compile(r"^== found valid index for %s, so using it\.\.\.$" % self.test_prefix) + self.assertTrue(regex.match(stdout.strip()), "Pattern '%s' matches with: %s" % (regex.pattern, stdout)) + + self.assertEqual(len(index), 24) + for fn in expected: + self.assertTrue(fn in index, "%s should be found in %s" % (fn, sorted(index))) + + ft.remove_file(index_fp) + + # test creating index file that's only valid for a (very) short amount of time + index_fp = ft.dump_index(self.test_prefix, 
max_age_sec=1) + time.sleep(3) + self.mock_stderr(True) + self.mock_stdout(True) + index = ft.load_index(self.test_prefix) + stderr = self.get_stderr() + stdout = self.get_stdout() + self.mock_stderr(False) + self.mock_stdout(False) + self.assertTrue(index is None) + self.assertFalse(stdout) + regex = re.compile(r"WARNING: Index for %s is no longer valid \(too old\), so ignoring it" % self.test_prefix) + self.assertTrue(regex.search(stderr), "Pattern '%s' found in: %s" % (regex.pattern, stderr)) + + # check whether load_index takes into account --ignore-index + init_config(build_options={'ignore_index': True}) + self.assertEqual(ft.load_index(self.test_prefix), None) + def test_search_file(self): """Test search_file function.""" test_ecs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs') @@ -1800,7 +1946,7 @@ def test_move_file(self): self.mock_stderr(False) # informative message printed, but file was not actually moved - regex = re.compile("^moved file .*/test\.txt to .*/new_test\.txt$") + regex = re.compile(r"^moved file .*/test\.txt to .*/new_test\.txt$") self.assertTrue(regex.search(stdout), "Pattern '%s' found in: %s" % (regex.pattern, stdout)) self.assertEqual(stderr, '') @@ -1863,7 +2009,7 @@ def test_diff_files(self): ]) res = ft.diff_files(foo, bar) self.assertTrue(res.endswith(expected), "%s ends with %s" % (res, expected)) - regex = re.compile('^--- .*/foo\s*\n\+\+\+ .*/bar\s*$', re.M) + regex = re.compile(r'^--- .*/foo\s*\n\+\+\+ .*/bar\s*$', re.M) self.assertTrue(regex.search(res), "Pattern '%s' found in: %s" % (regex.pattern, res)) def test_get_source_tarball_from_git(self): @@ -1871,7 +2017,7 @@ def test_get_source_tarball_from_git(self): git_config = { 'repo_name': 'testrepository', - 'url': 'https://github.com/hpcugent', + 'url': 'https://github.com/easybuilders', 'tag': 'master', } target_dir = os.path.join(self.test_prefix, 'target') @@ -1896,7 +2042,7 @@ def test_get_source_tarball_from_git(self): git_config = { 'repo_name': 'testrepository', - 'url': 'git@github.com:hpcugent', + 'url': 'git@github.com:easybuilders', 'tag': 'master', } args = ['test.tar.gz', self.test_prefix, git_config] @@ -1950,46 +2096,56 @@ def run_check(): git_config = { 'repo_name': 'testrepository', - 'url': 'git@github.com:hpcugent', + 'url': 'git@github.com:easybuilders', 'tag': 'master', } expected = '\n'.join([ - ' running command "git clone --branch master git@github.com:hpcugent/testrepository.git"', - " \(in .*/tmp.*\)", - ' running command "tar cfvz .*/target/test.tar.gz --exclude .git testrepository"', - " \(in .*/tmp.*\)", + r' running command "git clone --branch master git@github.com:easybuilders/testrepository.git"', + r" \(in .*/tmp.*\)", + r' running command "tar cfvz .*/target/test.tar.gz --exclude .git testrepository"', + r" \(in .*/tmp.*\)", ]) run_check() git_config['recursive'] = True expected = '\n'.join([ - ' running command "git clone --branch master --recursive git@github.com:hpcugent/testrepository.git"', - " \(in .*/tmp.*\)", - ' running command "tar cfvz .*/target/test.tar.gz --exclude .git testrepository"', - " \(in .*/tmp.*\)", + r' running command "git clone --branch master --recursive git@github.com:easybuilders/testrepository.git"', + r" \(in .*/tmp.*\)", + r' running command "tar cfvz .*/target/test.tar.gz --exclude .git testrepository"', + r" \(in .*/tmp.*\)", ]) run_check() + git_config['keep_git_dir'] = True + expected = '\n'.join([ + r' running command "git clone --branch master --recursive 
git@github.com:easybuilders/testrepository.git"', + r" \(in .*/tmp.*\)", + r' running command "tar cfvz .*/target/test.tar.gz testrepository"', + r" \(in .*/tmp.*\)", + ]) + run_check() + del git_config['keep_git_dir'] + del git_config['tag'] git_config['commit'] = '8456f86' expected = '\n'.join([ - ' running command "git clone --recursive git@github.com:hpcugent/testrepository.git"', - " \(in .*/tmp.*\)", - ' running command "git checkout 8456f86 && git submodule update"', - " \(in testrepository\)", - ' running command "tar cfvz .*/target/test.tar.gz --exclude .git testrepository"', - " \(in .*/tmp.*\)", + r' running command "git clone --recursive git@github.com:easybuilders/testrepository.git"', + r" \(in .*/tmp.*\)", + r' running command "git checkout 8456f86 && git submodule update"', + r" \(in testrepository\)", + r' running command "tar cfvz .*/target/test.tar.gz --exclude .git testrepository"', + r" \(in .*/tmp.*\)", ]) run_check() del git_config['recursive'] expected = '\n'.join([ - ' running command "git clone git@github.com:hpcugent/testrepository.git"', - " \(in .*/tmp.*\)", - ' running command "git checkout 8456f86"', - " \(in testrepository\)", - ' running command "tar cfvz .*/target/test.tar.gz --exclude .git testrepository"', - " \(in .*/tmp.*\)", + r' running command "git clone git@github.com:easybuilders/testrepository.git"', + r" \(in .*/tmp.*\)", + r' running command "git checkout 8456f86"', + r" \(in testrepository\)", + r' running command "tar cfvz .*/target/test.tar.gz --exclude .git testrepository"', + r" \(in .*/tmp.*\)", ]) run_check() @@ -2004,7 +2160,7 @@ def test_is_sha256_checksum(self): True, 12345, '', - (a_sha256_checksum, ), + (a_sha256_checksum,), [], ]: self.assertFalse(ft.is_sha256_checksum(not_a_sha256_checksum)) @@ -2065,6 +2221,153 @@ def test_fake_vsc(self): from test_fake_vsc import pkgutil self.assertTrue(pkgutil.__file__.endswith('/test_fake_vsc/pkgutil.py')) + def test_is_generic_easyblock(self): + """Test for is_generic_easyblock function.""" + + for name in ['Binary', 'ConfigureMake', 'CMakeMake', 'PythonPackage', 'JAR']: + self.assertTrue(ft.is_generic_easyblock(name)) + + for name in ['EB_bzip2', 'EB_DL_underscore_POLY_underscore_Classic', 'EB_GCC', 'EB_WRF_minus_Fire']: + self.assertFalse(ft.is_generic_easyblock(name)) + + def test_get_easyblock_class_name(self): + """Test for get_easyblock_class_name function.""" + + topdir = os.path.dirname(os.path.abspath(__file__)) + test_ebs = os.path.join(topdir, 'sandbox', 'easybuild', 'easyblocks') + + configuremake = os.path.join(test_ebs, 'generic', 'configuremake.py') + self.assertEqual(ft.get_easyblock_class_name(configuremake), 'ConfigureMake') + + gcc_eb = os.path.join(test_ebs, 'g', 'gcc.py') + self.assertEqual(ft.get_easyblock_class_name(gcc_eb), 'EB_GCC') + + toy_eb = os.path.join(test_ebs, 't', 'toy.py') + self.assertEqual(ft.get_easyblock_class_name(toy_eb), 'EB_toy') + + def test_copy_easyblocks(self): + """Test for copy_easyblocks function.""" + + topdir = os.path.dirname(os.path.abspath(__file__)) + test_ebs = os.path.join(topdir, 'sandbox', 'easybuild', 'easyblocks') + + # easybuild/easyblocks subdirectory must exist in target directory + error_pattern = "Could not find easybuild/easyblocks subdir in .*" + self.assertErrorRegex(EasyBuildError, error_pattern, ft.copy_easyblocks, [], self.test_prefix) + + easyblocks_dir = os.path.join(self.test_prefix, 'easybuild', 'easyblocks') + + # passing empty list works fine + ft.mkdir(easyblocks_dir, parents=True) + res = ft.copy_easyblocks([], 
self.test_prefix) + self.assertEqual(os.listdir(easyblocks_dir), []) + self.assertEqual(res, {'eb_names': [], 'new': [], 'paths_in_repo': []}) + + # check with different types of easyblocks + configuremake = os.path.join(test_ebs, 'generic', 'configuremake.py') + gcc_eb = os.path.join(test_ebs, 'g', 'gcc.py') + toy_eb = os.path.join(test_ebs, 't', 'toy.py') + test_ebs = [gcc_eb, configuremake, toy_eb] + + # copy them straight into tmpdir first, to check whether correct subdir is derived correctly + ft.copy_files(test_ebs, self.test_prefix) + + # touch empty toy.py easyblock, to check whether 'new' aspect is determined correctly + ft.write_file(os.path.join(easyblocks_dir, 't', 'toy.py'), '') + + # check whether easyblocks were copied as expected, and returned dict is correct + test_ebs = [os.path.join(self.test_prefix, os.path.basename(e)) for e in test_ebs] + res = ft.copy_easyblocks(test_ebs, self.test_prefix) + + self.assertEqual(sorted(res.keys()), ['eb_names', 'new', 'paths_in_repo']) + self.assertEqual(res['eb_names'], ['gcc', 'configuremake', 'toy']) + self.assertEqual(res['new'], [True, True, False]) # toy.py is not new + + self.assertEqual(sorted(os.listdir(easyblocks_dir)), ['g', 'generic', 't']) + + g_dir = os.path.join(easyblocks_dir, 'g') + self.assertEqual(sorted(os.listdir(g_dir)), ['gcc.py']) + copied_gcc_eb = os.path.join(g_dir, 'gcc.py') + self.assertEqual(ft.read_file(copied_gcc_eb), ft.read_file(gcc_eb)) + self.assertTrue(os.path.samefile(res['paths_in_repo'][0], copied_gcc_eb)) + + gen_dir = os.path.join(easyblocks_dir, 'generic') + self.assertEqual(sorted(os.listdir(gen_dir)), ['configuremake.py']) + copied_configuremake = os.path.join(gen_dir, 'configuremake.py') + self.assertEqual(ft.read_file(copied_configuremake), ft.read_file(configuremake)) + self.assertTrue(os.path.samefile(res['paths_in_repo'][1], copied_configuremake)) + + t_dir = os.path.join(easyblocks_dir, 't') + self.assertEqual(sorted(os.listdir(t_dir)), ['toy.py']) + copied_toy_eb = os.path.join(t_dir, 'toy.py') + self.assertEqual(ft.read_file(copied_toy_eb), ft.read_file(toy_eb)) + self.assertTrue(os.path.samefile(res['paths_in_repo'][2], copied_toy_eb)) + + def test_copy_framework_files(self): + """Test for copy_framework_files function.""" + + target_dir = os.path.join(self.test_prefix, 'target') + ft.mkdir(target_dir) + + res = ft.copy_framework_files([], target_dir) + + self.assertEqual(os.listdir(target_dir), []) + self.assertEqual(res, {'paths_in_repo': [], 'new': []}) + + foo_py = os.path.join(self.test_prefix, 'foo.py') + ft.write_file(foo_py, '') + + error_pattern = "Specified path '.*/foo.py' does not include a 'easybuild-framework' directory!" 
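# Editorial sketch, not part of the patch: how the pieces added in this diff could fit together
# for --new-pr; COPY_FUNCTIONS and det_pr_target_repo() are introduced in easybuild/tools/github.py
# (see earlier in this diff and the new test further down), categorize_files_by_type() is existing
# API; the wrapper function itself and the way the paths are passed on are an assumption.
from easybuild.framework.easyconfig.tools import categorize_files_by_type
from easybuild.tools.github import COPY_FUNCTIONS, det_pr_target_repo

def sketch_copy_files_for_new_pr(paths, target_dir):
    """Determine the target repository from the provided files and run the matching copy_* helper."""
    categorized_paths = categorize_files_by_type(paths)
    pr_target_repo = det_pr_target_repo(categorized_paths)  # e.g. 'easybuild-framework'
    return COPY_FUNCTIONS[pr_target_repo](paths, target_dir)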
+ self.assertErrorRegex(EasyBuildError, error_pattern, ft.copy_framework_files, [foo_py], self.test_prefix) + + # create empty test/framework/modules.py, to check whether 'new' is set correctly in result + ft.write_file(os.path.join(target_dir, 'test', 'framework', 'modules.py'), '') + + topdir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + test_files = [ + os.path.join('easybuild', 'tools', 'filetools.py'), + os.path.join('test', 'framework', 'modules.py'), + os.path.join('test', 'framework', 'sandbox', 'sources', 'toy', 'toy-0.0.tar.gz'), + ] + expected_entries = ['easybuild', 'test'] + # test/framework/modules.py is not new + expected_new = [True, False, True] + + # we include setup.py conditionally because it may not be there, + # for example when running the tests on an actual easybuild-framework instalation, + # as opposed to when running from a repository checkout... + # setup.py is an important test case, since it has no parent directory + # (it's straight in the easybuild-framework directory) + setup_py = 'setup.py' + if os.path.exists(os.path.join(topdir, setup_py)): + test_files.append(os.path.join(setup_py)) + expected_entries.append(setup_py) + expected_new.append(True) + + # files being copied are expected to be in a directory named 'easybuild-framework', + # so we need to make sure that's the case here as well (may not be in workspace dir on Travis from example) + framework_dir = os.path.join(self.test_prefix, 'easybuild-framework') + for test_file in test_files: + ft.copy_file(os.path.join(topdir, test_file), os.path.join(framework_dir, test_file)) + + test_paths = [os.path.join(framework_dir, f) for f in test_files] + + res = ft.copy_framework_files(test_paths, target_dir) + + self.assertEqual(sorted(os.listdir(target_dir)), sorted(expected_entries)) + + self.assertEqual(sorted(res.keys()), ['new', 'paths_in_repo']) + + for idx, test_file in enumerate(test_files): + orig_path = os.path.join(topdir, test_file) + copied_path = os.path.join(target_dir, test_file) + + self.assertTrue(os.path.exists(copied_path)) + self.assertEqual(ft.read_file(orig_path, mode='rb'), ft.read_file(copied_path, mode='rb')) + + self.assertTrue(os.path.samefile(copied_path, res['paths_in_repo'][idx])) + + self.assertEqual(res['new'], expected_new) def suite(): diff --git a/test/framework/github.py b/test/framework/github.py index 4b4c68c31c..bd1e7cecd4 100644 --- a/test/framework/github.py +++ b/test/framework/github.py @@ -37,8 +37,9 @@ from unittest import TextTestRunner from easybuild.base.rest import RestClient +from easybuild.framework.easyconfig.tools import categorize_files_by_type from easybuild.tools.build_log import EasyBuildError -from easybuild.tools.config import module_classes +from easybuild.tools.config import build_option, module_classes from easybuild.tools.configobj import ConfigObj from easybuild.tools.filetools import read_file, write_file from easybuild.tools.github import VALID_CLOSE_PR_REASONS @@ -54,8 +55,8 @@ # test account, for which a token may be available GITHUB_TEST_ACCOUNT = 'easybuild_test' -# the user & repo to use in this test (https://github.com/hpcugent/testrepository) -GITHUB_USER = "hpcugent" +# the user & repo to use in this test (https://github.com/easybuilders/testrepository) +GITHUB_USER = "easybuilders" GITHUB_REPO = "testrepository" # branch to test GITHUB_BRANCH = 'master' @@ -220,10 +221,10 @@ def test_close_pr(self): self.mock_stdout(False) patterns = [ - "hpcugent/testrepository PR #2 was submitted by 
migueldiascosta", + "easybuilders/testrepository PR #2 was submitted by migueldiascosta", "[DRY RUN] Adding comment to testrepository issue #2: '" + "@migueldiascosta, this PR is being closed for the following reason(s): just a test", - "[DRY RUN] Closed hpcugent/testrepository PR #2", + "[DRY RUN] Closed easybuilders/testrepository PR #2", ] for pattern in patterns: self.assertTrue(pattern in stdout, "Pattern '%s' found in: %s" % (pattern, stdout)) @@ -236,15 +237,42 @@ def test_close_pr(self): self.mock_stdout(False) patterns = [ - "hpcugent/testrepository PR #2 was submitted by migueldiascosta", + "easybuilders/testrepository PR #2 was submitted by migueldiascosta", "[DRY RUN] Adding comment to testrepository issue #2: '" + "@migueldiascosta, this PR is being closed for the following reason(s): %s" % retest_msg, - "[DRY RUN] Closed hpcugent/testrepository PR #2", - "[DRY RUN] Reopened hpcugent/testrepository PR #2", + "[DRY RUN] Closed easybuilders/testrepository PR #2", + "[DRY RUN] Reopened easybuilders/testrepository PR #2", ] for pattern in patterns: self.assertTrue(pattern in stdout, "Pattern '%s' found in: %s" % (pattern, stdout)) + def test_fetch_easyblocks_from_pr(self): + """Test fetch_easyblocks_from_pr function.""" + if self.skip_github_tests: + print("Skipping test_fetch_easyblocks_from_pr, no GitHub token available?") + return + + init_config(build_options={ + 'pr_target_account': gh.GITHUB_EB_MAIN, + }) + + # PR with new easyblock plus non-easyblock file + all_ebs_pr1964 = ['lammps.py'] + + # PR with changed easyblock + all_ebs_pr1967 = ['siesta.py'] + + # PR with more than one easyblock + all_ebs_pr1949 = ['configuremake.py', 'rpackage.py'] + + for pr, all_ebs in [(1964, all_ebs_pr1964), (1967, all_ebs_pr1967), (1949, all_ebs_pr1949)]: + try: + tmpdir = os.path.join(self.test_prefix, 'pr%s' % pr) + eb_files = gh.fetch_easyblocks_from_pr(pr, path=tmpdir, github_user=GITHUB_TEST_ACCOUNT) + self.assertEqual(sorted(all_ebs), sorted([os.path.basename(f) for f in eb_files])) + except URLError as err: + print("Ignoring URLError '%s' in test_fetch_easyblocks_from_pr" % err) + def test_fetch_easyconfigs_from_pr(self): """Test fetch_easyconfigs_from_pr function.""" if self.skip_github_tests: @@ -597,7 +625,7 @@ def test_restclient(self): client = RestClient('https://api.github.com', username=GITHUB_TEST_ACCOUNT, token=self.github_token) - status, body = client.repos['hpcugent']['testrepository'].contents.a_directory['a_file.txt'].get() + status, body = client.repos['easybuilders']['testrepository'].contents.a_directory['a_file.txt'].get() self.assertEqual(status, 200) # base64.b64encode requires & produces a 'bytes' value in Python 3, # but we need a string value hence the .decode() (also works in Python 2) @@ -666,6 +694,61 @@ def test_det_account_branch_for_pr(self): self.assertEqual(account, 'migueldiascosta') self.assertEqual(branch, 'fix_inject_checksums') + def test_det_pr_target_repo(self): + """Test det_pr_target_repo.""" + + self.assertEqual(build_option('pr_target_repo'), None) + + # no files => return default target repo (None) + self.assertEqual(gh.det_pr_target_repo(categorize_files_by_type([])), None) + + # easyconfigs/patches (incl. 
files to delete) => easyconfigs repo + # this is solely based on filenames, actual files are not opened + test_cases = [ + ['toy.eb'], + ['toy.patch'], + ['toy.eb', 'toy.patch'], + [':toy.eb'], # deleting toy.eb + ['one.eb', 'two.eb'], + ['one.eb', 'two.eb', 'toy.patch', ':todelete.eb'], + ] + for test_case in test_cases: + self.assertEqual(gh.det_pr_target_repo(categorize_files_by_type(test_case)), 'easybuild-easyconfigs') + + # if only Python files are involved, result is easyblocks or framework repo; + # all Python files are easyblocks => easyblocks repo, otherwise => framework repo; + # files are opened and inspected here to discriminate between easyblocks & other Python files, so must exist! + testdir = os.path.dirname(os.path.abspath(__file__)) + github_py = os.path.join(testdir, 'github.py') + + configuremake = os.path.join(testdir, 'sandbox', 'easybuild', 'easyblocks', 'generic', 'configuremake.py') + self.assertTrue(os.path.exists(configuremake)) + toy_eb = os.path.join(testdir, 'sandbox', 'easybuild', 'easyblocks', 't', 'toy.py') + self.assertTrue(os.path.exists(toy_eb)) + + self.assertEqual(build_option('pr_target_repo'), None) + self.assertEqual(gh.det_pr_target_repo(categorize_files_by_type([github_py])), 'easybuild-framework') + self.assertEqual(gh.det_pr_target_repo(categorize_files_by_type([configuremake])), 'easybuild-easyblocks') + py_files = [github_py, configuremake] + self.assertEqual(gh.det_pr_target_repo(categorize_files_by_type(py_files)), 'easybuild-framework') + py_files[0] = toy_eb + self.assertEqual(gh.det_pr_target_repo(categorize_files_by_type(py_files)), 'easybuild-easyblocks') + py_files.append(github_py) + self.assertEqual(gh.det_pr_target_repo(categorize_files_by_type(py_files)), 'easybuild-framework') + + # as soon as an easyconfig file or patch files is involved => result is easybuild-easyconfigs repo + for fn in ['toy.eb', 'toy.patch']: + self.assertEqual(gh.det_pr_target_repo(categorize_files_by_type(py_files + [fn])), 'easybuild-easyconfigs') + + # if --pr-target-repo is specified, we always get this value (no guessing anymore) + init_config(build_options={'pr_target_repo': 'thisisjustatest'}) + + self.assertEqual(gh.det_pr_target_repo(categorize_files_by_type([])), 'thisisjustatest') + self.assertEqual(gh.det_pr_target_repo(categorize_files_by_type(['toy.eb', 'toy.patch'])), 'thisisjustatest') + self.assertEqual(gh.det_pr_target_repo(categorize_files_by_type(py_files)), 'thisisjustatest') + self.assertEqual(gh.det_pr_target_repo(categorize_files_by_type([configuremake])), 'thisisjustatest') + self.assertEqual(gh.det_pr_target_repo(categorize_files_by_type([toy_eb])), 'thisisjustatest') + def test_push_branch_to_github(self): """Test push_branch_to_github.""" diff --git a/test/framework/options.py b/test/framework/options.py index bcb3dcbe09..8f681b0cab 100644 --- a/test/framework/options.py +++ b/test/framework/options.py @@ -59,6 +59,7 @@ from easybuild.tools.py2vs3 import URLError, reload, sort_looseversions from easybuild.tools.toolchain.utilities import TC_CONST_PREFIX from easybuild.tools.run import run_cmd +from easybuild.tools.systemtools import HAVE_ARCHSPEC from easybuild.tools.version import VERSION from test.framework.utilities import EnhancedTestCase, TestLoaderFiltered, init_config @@ -776,6 +777,47 @@ def test_search(self): args = [opt, pattern, '--robot', test_easyconfigs_dir] self.assertErrorRegex(EasyBuildError, "Invalid search query", self.eb_main, args, raise_error=True) + def test_ignore_index(self): + """ + Test use of 
--ignore-index. + """ + + test_ecs_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs') + toy_ec = os.path.join(test_ecs_dir, 'test_ecs', 't', 'toy', 'toy-0.0.eb') + copy_file(toy_ec, self.test_prefix) + + toy_ec_list = ['toy-0.0.eb', 'toy-1.2.3.eb', 'toy-4.5.6.eb'] + + # install index that list more files than are actually available, + # so we can check whether it's used + index_txt = '\n'.join(toy_ec_list) + write_file(os.path.join(self.test_prefix, '.eb-path-index'), index_txt) + + args = [ + '--search=toy', + '--robot-paths=%s' % self.test_prefix, + ] + self.mock_stdout(True) + self.eb_main(args, testing=False, raise_error=True) + stdout = self.get_stdout() + self.mock_stdout(False) + + for toy_ec_fn in toy_ec_list: + regex = re.compile(re.escape(os.path.join(self.test_prefix, toy_ec_fn)), re.M) + self.assertTrue(regex.search(stdout), "Pattern '%s' should be found in: %s" % (regex.pattern, stdout)) + + args.append('--ignore-index') + self.mock_stdout(True) + self.eb_main(args, testing=False, raise_error=True) + stdout = self.get_stdout() + self.mock_stdout(False) + + regex = re.compile(re.escape(os.path.join(self.test_prefix, 'toy-0.0.eb')), re.M) + self.assertTrue(regex.search(stdout), "Pattern '%s' should be found in: %s" % (regex.pattern, stdout)) + for toy_ec_fn in ['toy-1.2.3.eb', 'toy-4.5.6.eb']: + regex = re.compile(re.escape(os.path.join(self.test_prefix, toy_ec_fn)), re.M) + self.assertFalse(regex.search(stdout), "Pattern '%s' should not be found in: %s" % (regex.pattern, stdout)) + def test_search_archived(self): "Test searching for archived easyconfigs" args = ['--search-filename=^intel'] @@ -836,6 +878,16 @@ def test_show_ec(self): def test_copy_ec(self): """Test --copy-ec.""" + def mocked_main(args): + self.mock_stderr(True) + self.mock_stdout(True) + self.eb_main(args, raise_error=True) + stderr, stdout = self.get_stderr(), self.get_stdout() + self.mock_stderr(False) + self.mock_stdout(False) + self.assertEqual(stderr, '') + return stdout.strip() + topdir = os.path.dirname(os.path.abspath(__file__)) test_easyconfigs_dir = os.path.join(topdir, 'easyconfigs', 'test_ecs') @@ -845,7 +897,8 @@ def test_copy_ec(self): # basic test: copying one easyconfig file to a non-existing absolute path test_ec = os.path.join(self.test_prefix, 'test.eb') args = ['--copy-ec', 'toy-0.0.eb', test_ec] - self.eb_main(args) + stdout = mocked_main(args) + self.assertEqual(stdout, 'toy-0.0.eb copied to %s' % test_ec) self.assertTrue(os.path.exists(test_ec)) self.assertEqual(toy_ec_txt, read_file(test_ec)) @@ -858,7 +911,8 @@ def test_copy_ec(self): self.assertFalse(os.path.exists(target_fn)) args = ['--copy-ec', 'toy-0.0.eb', target_fn] - self.eb_main(args) + stdout = mocked_main(args) + self.assertEqual(stdout, 'toy-0.0.eb copied to test.eb') change_dir(cwd) @@ -869,7 +923,8 @@ def test_copy_ec(self): test_target_dir = os.path.join(self.test_prefix, 'test_target_dir') mkdir(test_target_dir) args = ['--copy-ec', 'toy-0.0.eb', test_target_dir] - self.eb_main(args) + stdout = mocked_main(args) + self.assertEqual(stdout, 'toy-0.0.eb copied to %s' % test_target_dir) copied_toy_ec = os.path.join(test_target_dir, 'toy-0.0.eb') self.assertTrue(os.path.exists(copied_toy_ec)) @@ -890,7 +945,8 @@ def check_copied_files(): # copying multiple easyconfig files to a non-existing target directory (which is created automatically) args = ['--copy-ec', 'toy-0.0.eb', 'bzip2-1.0.6-GCC-4.9.2.eb', test_target_dir] - self.eb_main(args) + stdout = mocked_main(args) + self.assertEqual(stdout, '2 
file(s) copied to %s' % test_target_dir) check_copied_files() @@ -901,7 +957,8 @@ def check_copied_files(): args[-1] = os.path.basename(test_target_dir) self.assertFalse(os.path.exists(args[-1])) - self.eb_main(args) + stdout = mocked_main(args) + self.assertEqual(stdout, '2 file(s) copied to test_target_dir') check_copied_files() @@ -912,6 +969,24 @@ def check_copied_files(): error_pattern = ".*/test.eb exists but is not a directory" self.assertErrorRegex(EasyBuildError, error_pattern, self.eb_main, args, raise_error=True) + # test use of --copy-ec with only one argument: copy to current working directory + test_working_dir = os.path.join(self.test_prefix, 'test_working_dir') + mkdir(test_working_dir) + change_dir(test_working_dir) + self.assertEqual(len(os.listdir(os.getcwd())), 0) + args = ['--copy-ec', 'toy-0.0.eb'] + stdout = mocked_main(args) + regex = re.compile('toy-0.0.eb copied to .*/%s' % os.path.basename(test_working_dir)) + self.assertTrue(regex.match(stdout), "Pattern '%s' found in: %s" % (regex.pattern, stdout)) + copied_toy_cwd = os.path.join(test_working_dir, 'toy-0.0.eb') + self.assertTrue(os.path.exists(copied_toy_cwd)) + self.assertEqual(read_file(copied_toy_cwd), toy_ec_txt) + + # --copy-ec without arguments results in a proper error + args = ['--copy-ec'] + error_pattern = "One of more files to copy should be specified!" + self.assertErrorRegex(EasyBuildError, error_pattern, self.eb_main, args, raise_error=True) + def test_dry_run(self): """Test dry run (long format).""" fd, dummylogfn = tempfile.mkstemp(prefix='easybuild-dummy', suffix='.log') @@ -1233,6 +1308,39 @@ def test_from_pr(self): print("Ignoring URLError '%s' in test_from_pr" % err) shutil.rmtree(tmpdir) + def test_from_pr_token_log(self): + """Check that --from-pr doesn't leak GitHub token in log.""" + if self.github_token is None: + print("Skipping test_from_pr_token_log, no GitHub token available?") + return + + fd, dummylogfn = tempfile.mkstemp(prefix='easybuild-dummy', suffix='.log') + os.close(fd) + + args = [ + # PR for foss/2018b, see https://github.com/easybuilders/easybuild-easyconfigs/pull/6424/files + '--from-pr=6424', + '--dry-run', + '--debug', + # an argument must be specified to --robot, since easybuild-easyconfigs may not be installed + '--robot=%s' % os.path.join(os.path.dirname(__file__), 'easyconfigs'), + '--github-user=%s' % GITHUB_TEST_ACCOUNT, # a GitHub token should be available for this user + ] + try: + self.mock_stdout(True) + self.mock_stderr(True) + outtxt = self.eb_main(args, logfile=dummylogfn, raise_error=True) + stdout = self.get_stdout() + stderr = self.get_stderr() + self.mock_stdout(False) + self.mock_stderr(False) + self.assertFalse(self.github_token in outtxt) + self.assertFalse(self.github_token in stdout) + self.assertFalse(self.github_token in stderr) + + except URLError as err: + print("Ignoring URLError '%s' in test_from_pr" % err) + def test_from_pr_listed_ecs(self): """Test --from-pr in combination with specifying easyconfigs on the command line.""" if self.github_token is None: @@ -1702,6 +1810,9 @@ def test_try(self): (['--try-toolchain-name=gompi', '--try-toolchain-version=2018a'], 'toy/0.0-GCC-6.4.0.2.28'), # --try-toolchain is overridden by --toolchain (['--try-toolchain=gompi,2018a', '--toolchain=system,system'], 'toy/0.0'), + # check we interpret SYSTEM correctly as a toolchain + (['--try-toolchain=SYSTEM'], 'toy/0.0'), + (['--toolchain=SYSTEM'], 'toy/0.0'), (['--try-software-name=foo', '--try-software-version=1.2.3'], 'foo/1.2.3'), 
(['--try-toolchain-name=gompi', '--try-toolchain-version=2018a'], 'toy/0.0-GCC-6.4.0.2.28'), # combining --try-toolchain with other build options is too complicated, in this case the code defaults back @@ -2401,7 +2512,8 @@ def test_xxx_include_easyblocks(self): self.eb_main(args, logfile=dummylogfn, raise_error=True) logtxt = read_file(self.logfile) - path_pattern = os.path.join(self.test_prefix, '.*', 'included-easyblocks', 'easybuild', 'easyblocks', 'foo.py') + path_pattern = os.path.join(self.test_prefix, '.*', 'included-easyblocks-.*', 'easybuild', 'easyblocks', + 'foo.py') foo_regex = re.compile(r"^\|-- EB_foo \(easybuild.easyblocks.foo @ %s\)" % path_pattern, re.M) self.assertTrue(foo_regex.search(logtxt), "Pattern '%s' found in: %s" % (foo_regex.pattern, logtxt)) @@ -2444,7 +2556,7 @@ def test_xxx_include_generic_easyblocks(self): self.eb_main(args, logfile=dummylogfn, raise_error=True) logtxt = read_file(self.logfile) - path_pattern = os.path.join(self.test_prefix, '.*', 'included-easyblocks', 'easybuild', 'easyblocks', + path_pattern = os.path.join(self.test_prefix, '.*', 'included-easyblocks-.*', 'easybuild', 'easyblocks', 'generic', 'foobar.py') foo_regex = re.compile(r"^\|-- FooBar \(easybuild.easyblocks.generic.foobar @ %s\)" % path_pattern, re.M) self.assertTrue(foo_regex.search(logtxt), "Pattern '%s' found in: %s" % (foo_regex.pattern, logtxt)) @@ -2482,7 +2594,7 @@ def test_xxx_include_generic_easyblocks(self): logtxt = read_file(self.logfile) mod_pattern = 'easybuild.easyblocks.generic.generictest' - path_pattern = os.path.join(self.test_prefix, '.*', 'included-easyblocks', 'easybuild', 'easyblocks', + path_pattern = os.path.join(self.test_prefix, '.*', 'included-easyblocks-.*', 'easybuild', 'easyblocks', 'generic', 'generictest.py') foo_regex = re.compile(r"^\|-- GenericTest \(%s @ %s\)" % (mod_pattern, path_pattern), re.M) self.assertTrue(foo_regex.search(logtxt), "Pattern '%s' found in: %s" % (foo_regex.pattern, logtxt)) @@ -2493,6 +2605,113 @@ def test_xxx_include_generic_easyblocks(self): # 'undo' import of foo easyblock del sys.modules['easybuild.easyblocks.generic.generictest'] + # must be run after test for --list-easyblocks, hence the '_xxx_' + # cleaning up the imported easyblocks is quite difficult... 
+ def test_xxx_include_easyblocks_from_pr(self): + """Test --include-easyblocks-from-pr.""" + if self.github_token is None: + print("Skipping test_preview_pr, no GitHub token available?") + return + + orig_local_sys_path = sys.path[:] + fd, dummylogfn = tempfile.mkstemp(prefix='easybuild-dummy', suffix='.log') + os.close(fd) + + # clear log + write_file(self.logfile, '') + + # include extra test easyblock + foo_txt = '\n'.join([ + 'from easybuild.framework.easyblock import EasyBlock', + 'class EB_foo(EasyBlock):', + ' pass', + '' + ]) + write_file(os.path.join(self.test_prefix, 'foo.py'), foo_txt) + + args = [ + '--include-easyblocks=%s/*.py' % self.test_prefix, # this shouldn't interfere + '--include-easyblocks-from-pr=1915', # a PR for CMakeMake easyblock + '--list-easyblocks=detailed', + '--unittest-file=%s' % self.logfile, + '--github-user=%s' % GITHUB_TEST_ACCOUNT, + ] + self.eb_main(args, logfile=dummylogfn, raise_error=True) + logtxt = read_file(self.logfile) + + # easyblock included from pr is found + path_pattern = os.path.join(self.test_prefix, '.*', 'included-easyblocks-.*', 'easybuild', 'easyblocks') + cmm_pattern = os.path.join(path_pattern, 'generic', 'cmakemake.py') + cmm_regex = re.compile(r"\|-- CMakeMake \(easybuild.easyblocks.generic.cmakemake @ %s\)" % cmm_pattern, re.M) + self.assertTrue(cmm_regex.search(logtxt), "Pattern '%s' found in: %s" % (cmm_regex.pattern, logtxt)) + + # easyblock is found via get_easyblock_class + klass = get_easyblock_class('CMakeMake') + self.assertTrue(issubclass(klass, EasyBlock), "%s is an EasyBlock derivative class" % klass) + + # 'undo' import of easyblocks + del sys.modules['easybuild.easyblocks.foo'] + del sys.modules['easybuild.easyblocks.generic.cmakemake'] + os.remove(os.path.join(self.test_prefix, 'foo.py')) + sys.path = orig_local_sys_path + import easybuild.easyblocks + reload(easybuild.easyblocks) + import easybuild.easyblocks.generic + reload(easybuild.easyblocks.generic) + + # include test cmakemake easyblock + cmm_txt = '\n'.join([ + 'from easybuild.framework.easyblock import EasyBlock', + 'class CMakeMake(EasyBlock):', + ' pass', + '' + ]) + write_file(os.path.join(self.test_prefix, 'cmakemake.py'), cmm_txt) + + # including the same easyblock twice should fail + args = [ + '--include-easyblocks=%s/cmakemake.py' % self.test_prefix, + '--include-easyblocks-from-pr=1915', + '--list-easyblocks=detailed', + '--unittest-file=%s' % self.logfile, + '--github-user=%s' % GITHUB_TEST_ACCOUNT, + ] + self.assertErrorRegex(EasyBuildError, + "Multiple inclusion of cmakemake.py, check your --include-easyblocks options", + self.eb_main, args, raise_error=True) + + os.remove(os.path.join(self.test_prefix, 'cmakemake.py')) + + # clear log + write_file(self.logfile, '') + + args = [ + '--from-pr=9979', # PR for CMake easyconfig + '--include-easyblocks-from-pr=1936', # PR for EB_CMake easyblock + '--unittest-file=%s' % self.logfile, + '--github-user=%s' % GITHUB_TEST_ACCOUNT, + '--extended-dry-run', + ] + self.eb_main(args, logfile=dummylogfn, raise_error=True) + logtxt = read_file(self.logfile) + + # easyconfig from pr is found + ec_pattern = os.path.join(self.test_prefix, '.*', 'files_pr9979', 'c', 'CMake', + 'CMake-3.16.4-GCCcore-9.2.0.eb') + ec_regex = re.compile(r"Parsing easyconfig file %s" % ec_pattern, re.M) + self.assertTrue(ec_regex.search(logtxt), "Pattern '%s' found in: %s" % (ec_regex.pattern, logtxt)) + + # easyblock included from pr is found + eb_regex = re.compile(r"Successfully obtained EB_CMake class instance from 
easybuild.easyblocks.cmake", re.M) + self.assertTrue(eb_regex.search(logtxt), "Pattern '%s' found in: %s" % (eb_regex.pattern, logtxt)) + + # easyblock is found via get_easyblock_class + klass = get_easyblock_class('EB_CMake') + self.assertTrue(issubclass(klass, EasyBlock), "%s is an EasyBlock derivative class" % klass) + + # 'undo' import of easyblocks + del sys.modules['easybuild.easyblocks.cmake'] + def mk_eb_test_cmd(self, args): """Construct test command for 'eb' with given options.""" @@ -2710,17 +2929,17 @@ def test_review_pr(self): self.mock_stdout(True) self.mock_stderr(True) - # PR for CMake 3.12.1 easyconfig, see https://github.com/easybuilders/easybuild-easyconfigs/pull/6660 + # PR for gzip 1.10 easyconfig, see https://github.com/easybuilders/easybuild-easyconfigs/pull/9921 args = [ '--color=never', '--github-user=%s' % GITHUB_TEST_ACCOUNT, - '--review-pr=6660', + '--review-pr=9921', ] self.eb_main(args, raise_error=True) txt = self.get_stdout() self.mock_stdout(False) self.mock_stderr(False) - regex = re.compile(r"^Comparing CMake-3.12.1-\S* with CMake-3.12.1-") + regex = re.compile(r"^Comparing gzip-1.10-\S* with gzip-1.10-") self.assertTrue(regex.search(txt), "Pattern '%s' not found in: %s" % (regex.pattern, txt)) def test_set_tmpdir(self): @@ -2940,6 +3159,8 @@ def test_new_branch_github(self): return topdir = os.path.dirname(os.path.abspath(__file__)) + + # test easyconfigs test_ecs = os.path.join(topdir, 'easyconfigs', 'test_ecs') toy_ec = os.path.join(test_ecs, 't', 'toy', 'toy-0.0.eb') @@ -2954,11 +3175,60 @@ def test_new_branch_github(self): remote = 'git@github.com:%s/easybuild-easyconfigs.git' % GITHUB_TEST_ACCOUNT regexs = [ r"^== fetching branch 'develop' from https://github.com/easybuilders/easybuild-easyconfigs.git\.\.\.", - r"^== copying easyconfigs to .*/easybuild-easyconfigs\.\.\.", + r"^== copying files to .*/easybuild-easyconfigs\.\.\.", + r"^== pushing branch '.*' to remote '.*' \(%s\) \[DRY RUN\]" % remote, + ] + self._assert_regexs(regexs, txt) + + # test easyblocks + test_ebs = os.path.join(topdir, 'sandbox', 'easybuild', 'easyblocks') + toy_eb = os.path.join(test_ebs, 't', 'toy.py') + + args = [ + '--new-branch-github', + '--github-user=%s' % GITHUB_TEST_ACCOUNT, + toy_eb, + '--pr-title="add easyblock for toy"', + '-D', + ] + txt, _ = self._run_mock_eb(args, do_build=True, raise_error=True, testing=False) + + remote = 'git@github.com:%s/easybuild-easyblocks.git' % GITHUB_TEST_ACCOUNT + regexs = [ + r"^== fetching branch 'develop' from https://github.com/easybuilders/easybuild-easyblocks.git\.\.\.", + r"^== copying files to .*/easybuild-easyblocks\.\.\.", r"^== pushing branch '.*' to remote '.*' \(%s\) \[DRY RUN\]" % remote, ] self._assert_regexs(regexs, txt) + # test framework with tweaked copy of test_module_naming_scheme.py + test_mns_py = os.path.join(topdir, 'sandbox', 'easybuild', 'tools', 'module_naming_scheme', + 'test_module_naming_scheme.py') + target_dir = os.path.join(self.test_prefix, 'easybuild-framework', 'test', 'framework', 'sandbox', + 'easybuild', 'tools', 'module_naming_scheme') + mkdir(target_dir, parents=True) + copy_file(test_mns_py, target_dir) + test_mns_py = os.path.join(target_dir, os.path.basename(test_mns_py)) + write_file(test_mns_py, '\n\n', append=True) + + args = [ + '--new-branch-github', + '--github-user=%s' % GITHUB_TEST_ACCOUNT, + test_mns_py, + '--pr-commit-msg="a test"', + '-D', + ] + txt, _ = self._run_mock_eb(args, do_build=True, raise_error=True, testing=False) + + remote = 
'git@github.com:%s/easybuild-framework.git' % GITHUB_TEST_ACCOUNT + regexs = [ + r"^== fetching branch 'develop' from https://github.com/easybuilders/easybuild-framework.git\.\.\.", + r"^== copying files to .*/easybuild-framework\.\.\.", + r"^== pushing branch '.*' to remote '.*' \(%s\) \[DRY RUN\]" % remote, + ] + self._assert_regexs(regexs, txt) + + def test_new_pr_from_branch(self): """Test --new-pr-from-branch.""" if self.github_token is None: @@ -3019,7 +3289,7 @@ def test_update_branch_github(self): full_repo = 'boegel/easybuild-easyconfigs' regexs = [ r"^== fetching branch 'develop' from https://github.com/%s.git\.\.\." % full_repo, - r"^== copying easyconfigs to .*/git-working-dir.*/easybuild-easyconfigs...", + r"^== copying files to .*/git-working-dir.*/easybuild-easyconfigs...", r"^== pushing branch 'develop' to remote '.*' \(git@github.com:%s.git\) \[DRY RUN\]" % full_repo, r"^Overview of changes:\n.*/easyconfigs/t/toy/toy-0.0.eb \| 32", r"== pushed updated branch 'develop' to boegel/easybuild-easyconfigs \[DRY RUN\]", @@ -3426,6 +3696,7 @@ def test_merge_pr(self): '4781', # PR for easyconfig for EasyBuild-3.3.0.eb '-D', '--github-user=%s' % GITHUB_TEST_ACCOUNT, + '--pr-target-branch=some_branch', ] # merged PR for EasyBuild-3.3.0.eb, is missing approved review @@ -3433,12 +3704,12 @@ def test_merge_pr(self): expected_stdout = '\n'.join([ "Checking eligibility of easybuilders/easybuild-easyconfigs PR #4781 for merging...", - "* targets develop branch: OK", "* test suite passes: OK", "* last test report is successful: OK", "* milestone is set: OK (3.3.1)", ]) expected_stderr = '\n'.join([ + "* targets some_branch branch: FAILED; found 'develop' => not eligible for merging!", "* approved review: MISSING => not eligible for merging!", '', "WARNING: Review indicates this PR should not be merged (use -f/--force to do so anyway)", @@ -3446,7 +3717,8 @@ def test_merge_pr(self): self.assertEqual(stderr.strip(), expected_stderr) self.assertTrue(stdout.strip().endswith(expected_stdout), "'%s' ends with '%s'" % (stdout, expected_stdout)) - # full eligible merged PR + # full eligible merged PR, default target branch + del args[-1] args[1] = '4832' stdout, stderr = self._run_mock_eb(args, do_build=True, raise_error=True, testing=False) @@ -4111,6 +4383,29 @@ def test_check_contrib_non_style(self): for pattern in patterns: self.assertTrue(re.search(pattern, stdout, re.M), "Pattern '%s' found in: %s" % (pattern, stdout)) + # --check-contrib passes if None values are used as checksum, but produces warning + toy = os.path.join(self.test_prefix, 'toy.eb') + copy_file(os.path.join(os.path.dirname(__file__), 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb'), toy) + toytxt = read_file(toy) + toytxt = toytxt + '\n'.join([ + 'checksums = [', + " None, # toy-0.0.tar.gz", + " # toy-0.0_fix-silly-typo-in-printf-statement.patch", + " '45b5e3f9f495366830e1869bb2b8f4e7c28022739ce48d9f9ebb159b439823c5',", + " '4196b56771140d8e2468fb77f0240bc48ddbf5dabafe0713d612df7fafb1e458', # toy-extra.txt", + ']\n', + ]) + write_file(toy, toytxt) + + args = ['--check-contrib', toy] + self.mock_stdout(True) + self.mock_stderr(True) + self.eb_main(args, raise_error=True) + stderr = self.get_stderr().strip() + self.mock_stdout(False) + self.mock_stderr(False) + self.assertEqual(stderr, "WARNING: Found 1 None checksum value(s), please make sure this is intended!") + def test_allow_use_as_root(self): """Test --allow-use-as-root-and-accept-consequences""" @@ -4559,6 +4854,12 @@ def test_show_system_info(self): "^ -> Python 
binary: .*/[pP]ython[0-9]?", "^ -> Python version: [0-9.]+", ] + + if HAVE_ARCHSPEC: + patterns.append(r"^ -> arch name: \w+$") + else: + patterns.append(r"^ -> arch name: UNKNOWN \(archspec is not installed\?\)$") + for pattern in patterns: regex = re.compile(pattern, re.M) self.assertTrue(regex.search(txt), "Pattern '%s' found in: %s" % (regex.pattern, txt)) @@ -4676,6 +4977,51 @@ def test_cuda_compute_capabilities(self): regex = re.compile(r"^cuda-compute-capabilities\s*\(C\)\s*=\s*3\.5, 6\.2, 7\.0$", re.M) self.assertTrue(regex.search(txt), "Pattern '%s' not found in: %s" % (regex.pattern, txt)) + def test_create_index(self): + """Test --create-index option.""" + test_ecs = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'easyconfigs', 'test_ecs') + remove_dir(self.test_prefix) + copy_dir(test_ecs, self.test_prefix) + + args = ['--create-index', self.test_prefix] + stdout, stderr = self._run_mock_eb(args, raise_error=True) + + self.assertEqual(stderr, '') + + patterns = [ + r"^Creating index for %s\.\.\.$", + r"^Index created at %s/\.eb-path-index \([0-9]+ files\)$", + ] + for pattern in patterns: + regex = re.compile(pattern % self.test_prefix, re.M) + self.assertTrue(regex.search(stdout), "Pattern %s matches in: %s" % (regex.pattern, stdout)) + + # check contents of index + index_fp = os.path.join(self.test_prefix, '.eb-path-index') + index_txt = read_file(index_fp) + + datestamp_pattern = r"[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]+" + patterns = [ + r"^# created at: " + datestamp_pattern + '$', + r"^# valid until: " + datestamp_pattern + '$', + r"^g/GCC/GCC-7.3.0-2.30.eb", + r"^t/toy/toy-0\.0\.eb", + ] + for pattern in patterns: + regex = re.compile(pattern, re.M) + self.assertTrue(regex.search(index_txt), "Pattern '%s' found in: %s" % (regex.pattern, index_txt)) + + # existing index is not overwritten without --force + error_pattern = "File exists, not overwriting it without --force: .*/.eb-path-index" + self.assertErrorRegex(EasyBuildError, error_pattern, self._run_mock_eb, args, raise_error=True) + + # also test creating index that's infinitely valid + args.extend(['--index-max-age=0', '--force']) + self._run_mock_eb(args, raise_error=True) + index_txt = read_file(index_fp) + regex = re.compile(r"^# valid until: 9999-12-31 23:59:59", re.M) + self.assertTrue(regex.search(index_txt), "Pattern '%s' found in: %s" % (regex.pattern, index_txt)) + def suite(): """ returns all the testcases in this module """ diff --git a/test/framework/repository.py b/test/framework/repository.py index 41a985deb2..b2326c7426 100644 --- a/test/framework/repository.py +++ b/test/framework/repository.py @@ -79,7 +79,7 @@ def test_gitrepo(self): print("(skipping GitRepository test)") return - test_repo_url = 'https://github.com/hpcugent/testrepository' + test_repo_url = 'https://github.com/easybuilders/testrepository' # URL repo = GitRepository(test_repo_url) @@ -122,7 +122,7 @@ def test_svnrepo(self): return # GitHub also supports SVN - test_repo_url = 'https://github.com/hpcugent/testrepository' + test_repo_url = 'https://github.com/easybuilders/testrepository' repo = SvnRepository(test_repo_url) repo.init() diff --git a/test/framework/robot.py b/test/framework/robot.py index fc94a84850..41df63a315 100644 --- a/test/framework/robot.py +++ b/test/framework/robot.py @@ -424,14 +424,14 @@ def test_resolve_dependencies_minimal(self): # to test resolving of dependencies with minimal toolchain # for each of these, we know test easyconfigs are available (which are required here) 
"dependencies = [", - " ('OpenMPI', '2.1.2'),", # available with GCC/6.4.0-2.28 + # the use of %(version_minor)s here is mainly to check if templates are being handled correctly + # (it doesn't make much sense, but it serves the purpose) + " ('OpenMPI', '%(version_minor)s.1.2'),", # available with GCC/6.4.0-2.28 " ('OpenBLAS', '0.2.20'),", # available with GCC/6.4.0-2.28 " ('ScaLAPACK', '2.0.2', '-OpenBLAS-0.2.20'),", # available with gompi/2018a " ('SQLite', '3.8.10.2'),", "]", # toolchain as list line, for easy modification later; - # the use of %(version_minor)s here is mainly to check if templates are being handled correctly - # (it doesn't make much sense, but it serves the purpose) "toolchain = {'name': 'foss', 'version': '%(version_minor)s018a'}", ] write_file(barec, '\n'.join(barec_lines)) diff --git a/test/framework/run.py b/test/framework/run.py index a5f1000e05..e7d608c7b2 100644 --- a/test/framework/run.py +++ b/test/framework/run.py @@ -268,6 +268,15 @@ def test_run_cmd_trace(self): init_config(build_options={'trace': True}) + pattern = [ + r"^ >> running command:", + r"\t\[started at: .*\]", + r"\t\[working dir: .*\]", + r"\t\[output logged in .*\]", + r"\techo hello", + r" >> command completed: exit 0, ran in .*", + ] + self.mock_stdout(True) self.mock_stderr(True) (out, ec) = run_cmd("echo hello") @@ -275,13 +284,24 @@ def test_run_cmd_trace(self): stderr = self.get_stderr() self.mock_stdout(False) self.mock_stderr(False) + self.assertEqual(ec, 0) + self.assertEqual(stderr, '') + regex = re.compile('\n'.join(pattern)) + self.assertTrue(regex.search(stdout), "Pattern '%s' found in: %s" % (regex.pattern, stdout)) + + # also test with command that is fed input via stdin + self.mock_stdout(True) + self.mock_stderr(True) + (out, ec) = run_cmd('cat', inp='hello') + stdout = self.get_stdout() + stderr = self.get_stderr() + self.mock_stdout(False) + self.mock_stderr(False) + self.assertEqual(ec, 0) self.assertEqual(stderr, '') - pattern = "^ >> running command:\n" - pattern += "\t\[started at: .*\]\n" - pattern += "\t\[output logged in .*\]\n" - pattern += "\techo hello\n" - pattern += ' >> command completed: exit 0, ran in .*' - regex = re.compile(pattern) + pattern.insert(3, r"\t\[input: hello\]") + pattern[-2] = "\tcat" + regex = re.compile('\n'.join(pattern)) self.assertTrue(regex.search(stdout), "Pattern '%s' found in: %s" % (regex.pattern, stdout)) # trace output can be disabled on a per-command basis @@ -356,11 +376,12 @@ def test_run_cmd_qa_trace(self): self.mock_stdout(False) self.mock_stderr(False) self.assertEqual(stderr, '') - pattern = "^ >> running interactive command:\n" - pattern += "\t\[started at: .*\]\n" - pattern += "\t\[output logged in .*\]\n" - pattern += "\techo \'n: \'; read n; seq 1 \$n\n" - pattern += ' >> interactive command completed: exit 0, ran in .*' + pattern = r"^ >> running interactive command:\n" + pattern += r"\t\[started at: .*\]\n" + pattern += r"\t\[working dir: .*\]\n" + pattern += r"\t\[output logged in .*\]\n" + pattern += r"\techo \'n: \'; read n; seq 1 \$n\n" + pattern += r' >> interactive command completed: exit 0, ran in .*' self.assertTrue(re.search(pattern, stdout), "Pattern '%s' found in: %s" % (pattern, stdout)) # trace output can be disabled on a per-command basis diff --git a/test/framework/sandbox/easybuild/easyblocks/generic/dummyextension.py b/test/framework/sandbox/easybuild/easyblocks/generic/dummyextension.py index da16d01483..af97c3f254 100644 --- a/test/framework/sandbox/easybuild/easyblocks/generic/dummyextension.py +++ 
b/test/framework/sandbox/easybuild/easyblocks/generic/dummyextension.py @@ -32,3 +32,11 @@ class DummyExtension(ExtensionEasyBlock): """Support for building/installing dummy extensions.""" + + def __init__(self, *args, **kwargs): + + super(DummyExtension, self).__init__(*args, **kwargs) + + # use lowercase name as default value for expected module name, and replace '-' with '_' + if 'modulename' not in self.options: + self.options['modulename'] = self.name.lower().replace('-', '_') diff --git a/test/framework/systemtools.py b/test/framework/systemtools.py index fa1f1331cb..bda6813014 100644 --- a/test/framework/systemtools.py +++ b/test/framework/systemtools.py @@ -44,8 +44,8 @@ from easybuild.tools.systemtools import CPU_VENDORS, AMD, APM, ARM, CAVIUM, IBM, INTEL from easybuild.tools.systemtools import MAX_FREQ_FP, PROC_CPUINFO_FP, PROC_MEMINFO_FP from easybuild.tools.systemtools import check_python_version, pick_dep_version -from easybuild.tools.systemtools import det_parallelism, get_avail_core_count, get_cpu_architecture, get_cpu_family -from easybuild.tools.systemtools import get_cpu_features, get_cpu_model, get_cpu_speed, get_cpu_vendor +from easybuild.tools.systemtools import det_parallelism, get_avail_core_count, get_cpu_arch_name, get_cpu_architecture +from easybuild.tools.systemtools import get_cpu_family, get_cpu_features, get_cpu_model, get_cpu_speed, get_cpu_vendor from easybuild.tools.systemtools import get_gcc_version, get_glibc_version, get_os_type, get_os_name, get_os_version from easybuild.tools.systemtools import get_platform_name, get_shared_lib_ext, get_system_info, get_total_memory @@ -338,6 +338,11 @@ def setUp(self): self.orig_platform_uname = st.platform.uname self.orig_get_tool_version = st.get_tool_version self.orig_sys_version_info = st.sys.version_info + self.orig_HAVE_ARCHSPEC = st.HAVE_ARCHSPEC + if hasattr(st, 'archspec_cpu_host'): + self.orig_archspec_cpu_host = st.archspec_cpu_host + else: + self.orig_archspec_cpu_host = None def tearDown(self): """Cleanup after systemtools test.""" @@ -349,6 +354,9 @@ def tearDown(self): st.platform.uname = self.orig_platform_uname st.get_tool_version = self.orig_get_tool_version st.sys.version_info = self.orig_sys_version_info + st.HAVE_ARCHSPEC = self.orig_HAVE_ARCHSPEC + if self.orig_archspec_cpu_host is not None: + st.archspec_cpu_host = self.orig_archspec_cpu_host super(SystemToolsTest, self).tearDown() def test_avail_core_count_native(self): @@ -529,6 +537,27 @@ def test_cpu_architecture(self): MACHINE_NAME = name self.assertEqual(get_cpu_architecture(), machine_names[name]) + def test_cpu_arch_name_native(self): + """Test getting CPU arch name.""" + arch_name = get_cpu_arch_name() + self.assertTrue(isinstance(arch_name, string_type)) + + def test_cpu_arch_name(self): + """Test getting CPU arch name.""" + + class MicroArch(object): + def __init__(self, name): + self.name = name + + st.HAVE_ARCHSPEC = True + st.archspec_cpu_host = lambda: MicroArch('haswell') + arch_name = get_cpu_arch_name() + self.assertEqual(arch_name, 'haswell') + + st.archspec_cpu_host = lambda: None + arch_name = get_cpu_arch_name() + self.assertEqual(arch_name, 'UNKNOWN') + def test_cpu_vendor_native(self): """Test getting CPU vendor.""" cpu_vendor = get_cpu_vendor() diff --git a/test/framework/toolchain.py b/test/framework/toolchain.py index 909bb2f070..2b0fc84634 100644 --- a/test/framework/toolchain.py +++ b/test/framework/toolchain.py @@ -948,6 +948,48 @@ def test_nosuchtoolchain(self): tc = self.get_toolchain('intel', version='1970.01') 
         self.assertErrorRegex(EasyBuildError, "No module found for toolchain", tc.prepare)
+
+    def test_mpi_cmd_prefix(self):
+        """Test mpi_cmd_prefix function."""
+        self.modtool.prepend_module_path(self.test_prefix)
+
+        tc = self.get_toolchain('gompi', version='2018a')
+        tc.prepare()
+        self.assertEqual(tc.mpi_cmd_prefix(nr_ranks=2), "mpirun -n 2")
+        self.assertEqual(tc.mpi_cmd_prefix(nr_ranks='2'), "mpirun -n 2")
+        self.assertEqual(tc.mpi_cmd_prefix(), "mpirun -n 1")
+        self.modtool.purge()
+
+        self.setup_sandbox_for_intel_fftw(self.test_prefix)
+        tc = self.get_toolchain('intel', version='2018a')
+        tc.prepare()
+        self.assertEqual(tc.mpi_cmd_prefix(nr_ranks=2), "mpirun -n 2")
+        self.assertEqual(tc.mpi_cmd_prefix(nr_ranks='2'), "mpirun -n 2")
+        self.assertEqual(tc.mpi_cmd_prefix(), "mpirun -n 1")
+        self.modtool.purge()
+
+        self.setup_sandbox_for_intel_fftw(self.test_prefix, imklver='10.2.6.038')
+        tc = self.get_toolchain('intel', version='2012a')
+        tc.prepare()
+
+        mpi_exec_nranks_re = re.compile("^mpirun --file=.*/mpdboot -machinefile .*/nodes -np 4")
+        self.assertTrue(mpi_exec_nranks_re.match(tc.mpi_cmd_prefix(nr_ranks=4)))
+        mpi_exec_nranks_re = re.compile("^mpirun --file=.*/mpdboot -machinefile .*/nodes -np 1")
+        self.assertTrue(mpi_exec_nranks_re.match(tc.mpi_cmd_prefix()))
+
+        # test specifying custom template for MPI commands
+        init_config(build_options={'mpi_cmd_template': "mpiexec -np %(nr_ranks)s -- %(cmd)s", 'silent': True})
+        self.assertEqual(tc.mpi_cmd_prefix(nr_ranks="7"), "mpiexec -np 7 --")
+        self.assertEqual(tc.mpi_cmd_prefix(), "mpiexec -np 1 --")
+
+        # check that we return None when command does not appear at the end of the template
+        init_config(build_options={'mpi_cmd_template': "mpiexec -np %(nr_ranks)s -- %(cmd)s option", 'silent': True})
+        self.assertEqual(tc.mpi_cmd_prefix(nr_ranks="7"), None)
+        self.assertEqual(tc.mpi_cmd_prefix(), None)
+
+        # template with extra spaces at the end is fine though
+        init_config(build_options={'mpi_cmd_template': "mpirun -np %(nr_ranks)s %(cmd)s ", 'silent': True})
+        self.assertEqual(tc.mpi_cmd_prefix(), "mpirun -np 1")
+
     def test_mpi_cmd_for(self):
         """Test mpi_cmd_for function."""
         self.modtool.prepend_module_path(self.test_prefix)
@@ -974,6 +1016,17 @@ def test_mpi_cmd_for(self):
         init_config(build_options={'mpi_cmd_template': "mpiexec -np %(nr_ranks)s -- %(cmd)s", 'silent': True})
         self.assertEqual(tc.mpi_cmd_for('test123', '7'), "mpiexec -np 7 -- test123")

+        # check whether expected error is raised when a template with missing keys is used;
+        # %(ranks)s should be %(nr_ranks)s
+        init_config(build_options={'mpi_cmd_template': "mpiexec -np %(ranks)s -- %(cmd)s", 'silent': True})
+        error_pattern = \
+            r"Missing templates in mpi-cmd-template value 'mpiexec -np %\(ranks\)s -- %\(cmd\)s': %\(nr_ranks\)s"
+        self.assertErrorRegex(EasyBuildError, error_pattern, tc.mpi_cmd_for, 'test', 1)
+
+        init_config(build_options={'mpi_cmd_template': "mpirun %(foo)s -np %(nr_ranks)s %(cmd)s", 'silent': True})
+        error_pattern = "Failed to complete MPI cmd template .* with .*: KeyError 'foo'"
+        self.assertErrorRegex(EasyBuildError, error_pattern, tc.mpi_cmd_for, 'test', 1)
+
     def test_prepare_deps(self):
         """Test preparing for a toolchain when dependencies are involved."""
         tc = self.get_toolchain('GCC', version='6.4.0-2.28')
diff --git a/test/framework/toy_build.py b/test/framework/toy_build.py
index ef3e5d10e1..3145981ca7 100644
--- a/test/framework/toy_build.py
+++ b/test/framework/toy_build.py
@@ -34,6 +34,7 @@
 import os
 import re
 import shutil
+import signal
 import
stat import sys import tempfile @@ -1231,6 +1232,7 @@ def test_toy_module_fulltxt(self): r'', r'conflict\("toy"\)', r'', + r'prepend_path\("CMAKE_PREFIX_PATH", root\)', r'prepend_path\("LD_LIBRARY_PATH", pathJoin\(root, "lib"\)\)', r'prepend_path\("LIBRARY_PATH", pathJoin\(root, "lib"\)\)', r'prepend_path\("PATH", pathJoin\(root, "bin"\)\)', @@ -1268,6 +1270,7 @@ def test_toy_module_fulltxt(self): r'', r'conflict toy', r'', + r'prepend-path CMAKE_PREFIX_PATH \$root', r'prepend-path LD_LIBRARY_PATH \$root/lib', r'prepend-path LIBRARY_PATH \$root/lib', r'prepend-path PATH \$root/bin', @@ -1413,7 +1416,7 @@ def test_module_only(self): self.assertTrue(os.path.exists(os.path.join(self.test_installpath, 'software', 'toy', '0.0-deps', 'bin'))) modtxt = read_file(toy_mod) self.assertTrue(re.search("set root %s" % prefix, modtxt)) - self.assertEqual(len(os.listdir(os.path.join(self.test_installpath, 'software'))), 1) + self.assertEqual(len(os.listdir(os.path.join(self.test_installpath, 'software'))), 2) self.assertEqual(len(os.listdir(os.path.join(self.test_installpath, 'software', 'toy'))), 1) # install (only) additional module under a hierarchical MNS @@ -1428,7 +1431,7 @@ def test_module_only(self): # existing install is reused modtxt2 = read_file(toy_core_mod) self.assertTrue(re.search("set root %s" % prefix, modtxt2)) - self.assertEqual(len(os.listdir(os.path.join(self.test_installpath, 'software'))), 2) + self.assertEqual(len(os.listdir(os.path.join(self.test_installpath, 'software'))), 3) self.assertEqual(len(os.listdir(os.path.join(self.test_installpath, 'software', 'toy'))), 1) # make sure load statements for dependencies are included @@ -1439,7 +1442,7 @@ def test_module_only(self): os.remove(toy_core_mod) # test installing (only) additional module in Lua syntax (if Lmod is available) - lmod_abspath = which('lmod') + lmod_abspath = os.environ.get('LMOD_CMD') or which('lmod') if lmod_abspath is not None: args = common_args[:-1] + [ '--allow-modules-tool-mismatch', @@ -1453,7 +1456,7 @@ def test_module_only(self): # existing install is reused modtxt3 = read_file(toy_mod + '.lua') self.assertTrue(re.search('local root = "%s"' % prefix, modtxt3)) - self.assertEqual(len(os.listdir(os.path.join(self.test_installpath, 'software'))), 2) + self.assertEqual(len(os.listdir(os.path.join(self.test_installpath, 'software'))), 3) self.assertEqual(len(os.listdir(os.path.join(self.test_installpath, 'software', 'toy'))), 1) # make sure load statements for dependencies are included @@ -2055,7 +2058,7 @@ def test_toy_modaltsoftname(self): self.assertTrue(os.path.exists(os.path.join(modules_path, 'yot', yot_name))) # only subdirectories for software should be created - self.assertEqual(os.listdir(software_path), ['toy']) + self.assertEqual(sorted(os.listdir(software_path)), sorted(['toy', '.locks'])) self.assertEqual(sorted(os.listdir(os.path.join(software_path, 'toy'))), ['0.0-one', '0.0-two']) # only subdirectories for modules with alternative names should be created @@ -2076,17 +2079,24 @@ def test_toy_build_trace(self): self.assertEqual(stderr, '') patterns = [ - "^ >> installation prefix: .*/software/toy/0\.0$", - "^== fetching files\.\.\.\n >> sources:\n >> .*/toy-0\.0\.tar\.gz \[SHA256: 44332000.*\]$", - "^ >> applying patch toy-0\.0_fix-silly-typo-in-printf-statement\.patch$", - "^ >> running command:\n\t\[started at: .*\]\n\t\[output logged in .*\]\n\tgcc toy.c -o toy\n" + - " >> command completed: exit 0, ran in .*", - '^' + '\n'.join([ - "== sanity checking\.\.\.", - " >> file 'bin/yot' or 
'bin/toy' found: OK", - " >> \(non-empty\) directory 'bin' found: OK", - ]) + '$', - "^== creating module\.\.\.\n >> generating module file @ .*/modules/all/toy/0\.0(?:\.lua)?$", + r"^ >> installation prefix: .*/software/toy/0\.0$", + r"^== fetching files\.\.\.\n >> sources:\n >> .*/toy-0\.0\.tar\.gz \[SHA256: 44332000.*\]$", + r"^ >> applying patch toy-0\.0_fix-silly-typo-in-printf-statement\.patch$", + r'\n'.join([ + r"^ >> running command:", + r"\t\[started at: .*\]", + r"\t\[working dir: .*\]", + r"\t\[output logged in .*\]", + r"\tgcc toy.c -o toy\n" + r'', + ]), + r" >> command completed: exit 0, ran in .*", + r'^' + r'\n'.join([ + r"== sanity checking\.\.\.", + r" >> file 'bin/yot' or 'bin/toy' found: OK", + r" >> \(non-empty\) directory 'bin' found: OK", + ]) + r'$', + r"^== creating module\.\.\.\n >> generating module file @ .*/modules/all/toy/0\.0(?:\.lua)?$", ] for pattern in patterns: regex = re.compile(pattern, re.M) @@ -2368,6 +2378,10 @@ def test_fix_shebang(self): test_ec_txt = '\n'.join([ toy_ec_txt, "postinstallcmds = [" + # copy of bin/toy to use in fix_python_shebang_for and fix_perl_shebang_for + " 'cp -a %(installdir)s/bin/toy %(installdir)s/bin/toy.python',", + " 'cp -a %(installdir)s/bin/toy %(installdir)s/bin/toy.perl',", + # hardcoded path to bin/python " 'echo \"#!/usr/bin/python\\n# test\" > %(installdir)s/bin/t1.py',", # hardcoded path to bin/python3.6 @@ -2378,6 +2392,12 @@ def test_fix_shebang(self): " 'echo \"#! /usr/bin/env python3\\n# test\" > %(installdir)s/bin/t4.py',", # 'env python3.6' " 'echo \"#!/usr/bin/env python3.6\\n# test\" > %(installdir)s/bin/t5.py',", + # shebang with space, should strip the space + " 'echo \"#! /usr/bin/env python\\n# test\" > %(installdir)s/bin/t6.py',", + # no shebang python + " 'echo \"# test\" > %(installdir)s/bin/t7.py',", + # shebang bash + " 'echo \"#!/usr/bin/env bash\\n# test\" > %(installdir)s/bin/b1.sh',", # tests for perl shebang # hardcoded path to bin/perl @@ -2390,19 +2410,34 @@ def test_fix_shebang(self): " 'echo \"#!/usr/bin/perl -w\\n# test\" > %(installdir)s/bin/t4.pl',", # space after #! + 'env perl5' " 'echo \"#!/usr/bin/env perl5\\n# test\" > %(installdir)s/bin/t5.pl',", + # shebang with space, should strip the space + " 'echo \"#! /usr/bin/env perl\\n# test\" > %(installdir)s/bin/t6.pl',", + # no shebang perl + " 'echo \"# test\" > %(installdir)s/bin/t7.pl',", + # shebang bash + " 'echo \"#!/usr/bin/env bash\\n# test\" > %(installdir)s/bin/b2.sh',", "]", - "fix_python_shebang_for = ['bin/t1.py', 'bin/*.py', 'nosuchdir/*.py', 'bin/toy']", - "fix_perl_shebang_for = 'bin/*.pl'", + "fix_python_shebang_for = ['bin/t1.py', 'bin/*.py', 'nosuchdir/*.py', 'bin/toy.python', 'bin/b1.sh']", + "fix_perl_shebang_for = ['bin/*.pl', 'bin/b2.sh', 'bin/toy.perl']", ]) write_file(test_ec, test_ec_txt) self.test_toy_build(ec_file=test_ec, raise_error=True) toy_bindir = os.path.join(self.test_installpath, 'software', 'toy', '0.0', 'bin') + # bin/toy and bin/toy2 should *not* be patched, since they're binary files + toy_txt = read_file(os.path.join(toy_bindir, 'toy'), mode='rb') + for fn in ['toy.perl', 'toy.python']: + fn_txt = read_file(os.path.join(toy_bindir, fn), mode='rb') + # no shebang added + self.assertFalse(fn_txt.startswith(b"#!/")) + # exact same file as original binary (untouched) + self.assertEqual(toy_txt, fn_txt) + # no re.M, this should match at start of file! 
py_shebang_regex = re.compile(r'^#!/usr/bin/env python\n# test$') - for pybin in ['t1.py', 't2.py', 't3.py', 't4.py', 't5.py']: + for pybin in ['t1.py', 't2.py', 't3.py', 't4.py', 't5.py', 't6.py', 't7.py']: pybin_path = os.path.join(toy_bindir, pybin) pybin_txt = read_file(pybin_path) self.assertTrue(py_shebang_regex.match(pybin_txt), @@ -2410,12 +2445,20 @@ def test_fix_shebang(self): # no re.M, this should match at start of file! perl_shebang_regex = re.compile(r'^#!/usr/bin/env perl\n# test$') - for perlbin in ['t1.pl', 't2.pl', 't3.pl', 't4.pl', 't5.pl']: + for perlbin in ['t1.pl', 't2.pl', 't3.pl', 't4.pl', 't5.pl', 't6.pl', 't7.pl']: perlbin_path = os.path.join(toy_bindir, perlbin) perlbin_txt = read_file(perlbin_path) self.assertTrue(perl_shebang_regex.match(perlbin_txt), "Pattern '%s' found in %s: %s" % (perl_shebang_regex.pattern, perlbin_path, perlbin_txt)) + # There are 2 bash files which shouldn't be influenced by fix_shebang + bash_shebang_regex = re.compile(r'^#!/usr/bin/env bash\n# test$') + for bashbin in ['b1.sh', 'b2.sh']: + bashbin_path = os.path.join(toy_bindir, bashbin) + bashbin_txt = read_file(bashbin_path) + self.assertTrue(bash_shebang_regex.match(bashbin_txt), + "Pattern '%s' found in %s: %s" % (bash_shebang_regex.pattern, bashbin_path, bashbin_txt)) + def test_toy_system_toolchain_alias(self): """Test use of 'system' toolchain alias.""" toy_ec = os.path.join(os.path.dirname(__file__), 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb') @@ -2474,6 +2517,95 @@ def test_toy_ghost_installdir(self): self.assertFalse(os.path.exists(toy_installdir)) + def test_toy_build_lock(self): + """Test toy installation when a lock is already in place.""" + + locks_dir = os.path.join(self.test_installpath, 'software', '.locks') + toy_installdir = os.path.join(self.test_installpath, 'software', 'toy', '0.0') + toy_lock_fn = toy_installdir.replace(os.path.sep, '_') + '.lock' + + toy_lock_path = os.path.join(locks_dir, toy_lock_fn) + mkdir(toy_lock_path, parents=True) + + error_pattern = "Lock .*_software_toy_0.0.lock already exists, aborting!" 
+        self.assertErrorRegex(EasyBuildError, error_pattern, self.test_toy_build, raise_error=True, verbose=False)
+
+        locks_dir = os.path.join(self.test_prefix, 'locks')
+
+        # no lock in place, so installation proceeds as normal
+        extra_args = ['--locks-dir=%s' % locks_dir]
+        self.test_toy_build(extra_args=extra_args, verify=True, raise_error=True)
+
+        # put lock in place in custom locks dir, try again
+        toy_lock_path = os.path.join(locks_dir, toy_lock_fn)
+        mkdir(toy_lock_path, parents=True)
+        self.assertErrorRegex(EasyBuildError, error_pattern, self.test_toy_build,
+                              extra_args=extra_args, raise_error=True, verbose=False)
+
+        # also test use of --ignore-locks
+        self.test_toy_build(extra_args=extra_args + ['--ignore-locks'], verify=True, raise_error=True)
+
+        # define a context manager that removes a lock after a while, so we can check the use of --wait-on-lock
+        class remove_lock_after:
+            def __init__(self, seconds, lock_fp):
+                self.seconds = seconds
+                self.lock_fp = lock_fp
+
+            def remove_lock(self, *args):
+                remove_dir(self.lock_fp)
+
+            def __enter__(self):
+                signal.signal(signal.SIGALRM, self.remove_lock)
+                signal.alarm(self.seconds)
+
+            def __exit__(self, type, value, traceback):
+                pass
+
+        # wait for lock to be removed, checking again every 1 second
+        extra_args.append('--wait-on-lock=1')
+
+        wait_regex = re.compile("^== lock .*_software_toy_0.0.lock exists, waiting 1 seconds", re.M)
+        ok_regex = re.compile("^== COMPLETED: Installation ended successfully", re.M)
+
+        self.assertTrue(os.path.exists(toy_lock_path))
+
+        # use context manager to remove lock after 3 seconds
+        with remove_lock_after(3, toy_lock_path):
+            self.mock_stderr(True)
+            self.mock_stdout(True)
+            self.test_toy_build(extra_args=extra_args, verify=False, raise_error=True, testing=False)
+            stderr, stdout = self.get_stderr(), self.get_stdout()
+            self.mock_stderr(False)
+            self.mock_stdout(False)
+
+            self.assertEqual(stderr, '')
+
+            wait_matches = wait_regex.findall(stdout)
+            # we can't rely on an exact number of 'waiting' messages, so let's go with a range...
+ self.assertTrue(len(wait_matches) in range(2, 5)) + + self.assertTrue(ok_regex.search(stdout), "Pattern '%s' found in: %s" % (ok_regex.pattern, stdout)) + + # when there is no lock in place, --wait-on-lock has no impact + self.assertFalse(os.path.exists(toy_lock_path)) + self.mock_stderr(True) + self.mock_stdout(True) + self.test_toy_build(extra_args=extra_args, verify=False, raise_error=True, testing=False) + stderr, stdout = self.get_stderr(), self.get_stdout() + self.mock_stderr(False) + self.mock_stdout(False) + + self.assertEqual(stderr, '') + self.assertTrue(ok_regex.search(stdout), "Pattern '%s' found in: %s" % (ok_regex.pattern, stdout)) + self.assertFalse(wait_regex.search(stdout), "Pattern '%s' not found in: %s" % (wait_regex.pattern, stdout)) + + # check for clean error on creation of lock + extra_args = ['--locks-dir=/'] + error_pattern = r"Failed to create lock /.*_software_toy_0.0.lock:.* " + error_pattern += r"(Read-only file system|Permission denied)" + self.assertErrorRegex(EasyBuildError, error_pattern, self.test_toy_build, + extra_args=extra_args, raise_error=True, verbose=False) + def suite(): """ return all the tests in this file """ diff --git a/test/framework/type_checking.py b/test/framework/type_checking.py index 3dd60dcbd0..b1247832c5 100644 --- a/test/framework/type_checking.py +++ b/test/framework/type_checking.py @@ -658,6 +658,8 @@ def test_to_checksums(self): ['be662daa971a640e40be5c804d9d7d10', ('adler32', '0x998410035'), ('crc32', '0x1553842328'), ('md5', 'be662daa971a640e40be5c804d9d7d10'), ('sha1', 'f618096c52244539d0e89867405f573fdb0b55b0'), ('size', 273)], + # None values should not be filtered out, but left in place + [None, 'fa618be8435447a017fd1bf2c7ae922d0428056cfc7449f7a8641edf76b48265', None], ] for checksums in test_inputs: self.assertEqual(to_checksums(checksums), checksums)
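For reference, a minimal sketch of the behaviour pinned down by the new to_checksums test case above: None placeholders in a checksums list are accepted and passed through unchanged rather than being filtered out, and (per the --check-contrib test earlier in this patch) they only trigger a warning. The filenames and hash values below are the toy test fixtures reused elsewhere in this test suite, purely for illustration; the import location is assumed to be the one test/framework/type_checking.py itself uses.

from easybuild.framework.easyconfig.types import to_checksums

# checksums list as it could appear in an easyconfig;
# a None entry is accepted and simply left in place (it is not silently dropped)
checksums = [
    None,  # toy-0.0.tar.gz
    # toy-0.0_fix-silly-typo-in-printf-statement.patch
    '45b5e3f9f495366830e1869bb2b8f4e7c28022739ce48d9f9ebb159b439823c5',
    '4196b56771140d8e2468fb77f0240bc48ddbf5dabafe0713d612df7fafb1e458',  # toy-extra.txt
]

# to_checksums acts as a pass-through for this list, None values included
assert to_checksums(checksums) == checksums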