diff --git a/.github/workflows/ubuntu-ci-x86_64-gnu.yaml b/.github/workflows/ubuntu-ci-x86_64-gnu.yaml index 68f863aeb..17bc60732 100644 --- a/.github/workflows/ubuntu-ci-x86_64-gnu.yaml +++ b/.github/workflows/ubuntu-ci-x86_64-gnu.yaml @@ -45,23 +45,11 @@ jobs: - name: prepare-directories run: | - # DH* REVERT ME AFTER MERGE mkdir -p ${BUILD_CACHE_PATH} mkdir -p ${SOURCE_CACHE_PATH} - name: create-buildcache run: | - # Get day of week to decide whether to use build caches or not - DOW=$(date +%u) - # Monday is 1 ... Sunday is 7 - if [[ $DOW == 7 ]]; then - export USE_BINARY_CACHE=false - echo "Ignore existing binary cache for creating buildcache environment" - else - export USE_BINARY_CACHE=true - echo "Use existing binary cache for creating buildcache environment" - fi - # Set up spack-stack source ./setup.sh @@ -118,23 +106,22 @@ jobs: # Add additional variants for MET packages, different from config/common/packages.yaml spack config add "packages:met:variants:+python +grib2 +graphics +lidar2nc +modis" - # Concretize and check for duplicates + # Concretize, check for duplicates and for correct compilers being used spack concretize --force --fresh 2>&1 | tee log.concretize.${ENVNAME} ${SPACK_STACK_DIR}/util/show_duplicate_packages.py -i fms -i crtm -i crtm-fix -i esmf -i mapl -i py-cython -i neptune-env -i fms -i ip + spack stack check-preferred-compiler 2>&1 | tee log.check-preferred-compiler.${ENVNAME} # Add and update source cache spack mirror add local-source file://${SOURCE_CACHE_PATH}/ spack mirror create -a -d ${SOURCE_CACHE_PATH}/ - # Add binary cache if requested - if [ "$USE_BINARY_CACHE" = true ] ; then - set +e - spack mirror add local-binary file://${BUILD_CACHE_PATH}/ - spack buildcache update-index local-binary - set -e - echo "Packages in spack binary cache:" - spack buildcache list - fi + # Add binary cache + set +e + spack mirror add local-binary file://${BUILD_CACHE_PATH}/ + spack buildcache update-index local-binary + set -e + echo "Packages 
in spack binary cache:" + spack buildcache list # Break installation up in pieces and create build caches in between # This allows us to "spin up" builds that altogether take longer than @@ -169,11 +156,6 @@ jobs: spack install --fail-fast --source --no-check-signature 2>&1 | tee log.install.${ENVNAME}.all spack buildcache create -u local-binary - # Remove binary cache for next round of concretization - if [ "$USE_BINARY_CACHE" = true ] ; then - spack mirror rm local-binary - fi - # Remove buildcache config settings spack config remove config:install_tree:padded_length @@ -196,14 +178,10 @@ jobs: rsync -av --exclude='install' --exclude='spack.lock' --exclude='.spack_db' ${BUILDCACHE_ENVDIR}/ ${ENVDIR}/ spack env activate ${ENVDIR} - # Concretize and check for duplicates - spack concretize --force 2>&1 | tee log.concretize.${ENVNAME} + # Concretize, check for duplicates and for correct compilers being used + spack concretize --force --fresh 2>&1 | tee log.concretize.${ENVNAME} ${SPACK_STACK_DIR}/util/show_duplicate_packages.py -i fms -i crtm -i crtm-fix -i esmf -i mapl -i py-cython -i neptune-env -i fms -i ip - - # Add binary cache back in - spack mirror add local-binary file://${BUILD_CACHE_PATH}/ - echo "Packages in combined spack build caches:" - spack buildcache list + spack stack check-preferred-compiler 2>&1 | tee log.check-preferred-compiler.${ENVNAME} # Install from cache spack install --fail-fast --source --no-check-signature 2>&1 | tee log.install.${ENVNAME}.all diff --git a/.github/workflows/ubuntu-ci-x86_64-oneapi-ifx.yaml b/.github/workflows/ubuntu-ci-x86_64-oneapi-ifx.yaml index fa149b001..2e12da6c6 100644 --- a/.github/workflows/ubuntu-ci-x86_64-oneapi-ifx.yaml +++ b/.github/workflows/ubuntu-ci-x86_64-oneapi-ifx.yaml @@ -45,23 +45,11 @@ jobs: - name: prepare-directories run: | - # DH* REVERT ME AFTER MERGE mkdir -p ${BUILD_CACHE_PATH} mkdir -p ${SOURCE_CACHE_PATH} - name: create-buildcache run: | - # Get day of week to decide whether to use build 
caches or not - DOW=$(date +%u) - # Monday is 1 ... Sunday is 7 - if [[ $DOW == 7 ]]; then - export USE_BINARY_CACHE=false - echo "Ignore existing binary cache for creating buildcache environment" - else - export USE_BINARY_CACHE=true - echo "Use existing binary cache for creating buildcache environment" - fi - # Set up spack-stack source ./setup.sh @@ -134,23 +122,22 @@ jobs: # Don't generate ecflow module when using external package spack config add "modules:default:tcl:exclude:[ecflow]" - # Concretize and check for duplicates + # Concretize, check for duplicates and for correct compilers being used spack concretize --force --fresh 2>&1 | tee log.concretize.${ENVNAME} ${SPACK_STACK_DIR}/util/show_duplicate_packages.py -i fms -i crtm -i crtm-fix -i esmf -i mapl -i py-cython -i neptune-env -i fms -i ip + spack stack check-preferred-compiler 2>&1 | tee log.check-preferred-compiler.${ENVNAME} # Add and update source cache spack mirror add local-source file://${SOURCE_CACHE_PATH}/ spack mirror create -a -d ${SOURCE_CACHE_PATH}/ - # Add binary cache if requested - if [ "$USE_BINARY_CACHE" = true ] ; then - set +e - spack mirror add local-binary file://${BUILD_CACHE_PATH}/ - spack buildcache update-index local-binary - set -e - echo "Packages in spack binary cache:" - spack buildcache list - fi + # Add binary cache + set +e + spack mirror add local-binary file://${BUILD_CACHE_PATH}/ + spack buildcache update-index local-binary + set -e + echo "Packages in spack binary cache:" + spack buildcache list # Break installation up in pieces and create build caches in between # This allows us to "spin up" builds that altogether take longer than @@ -188,11 +175,6 @@ jobs: # Check for libirc.so in shared libraries (returns 1=error if found) ${SPACK_STACK_DIR}/util/check_libirc.sh - # Remove binary cache for next round of concretization - if [ "$USE_BINARY_CACHE" = true ] ; then - spack mirror rm local-binary - fi - # Remove buildcache config settings spack config remove 
config:install_tree:padded_length @@ -215,14 +197,10 @@ jobs: rsync -av --exclude='install' --exclude='spack.lock' --exclude='.spack_db' ${BUILDCACHE_ENVDIR}/ ${ENVDIR}/ spack env activate ${ENVDIR} - # Concretize and check for duplicates - spack concretize --force 2>&1 | tee log.concretize.${ENVNAME} + # Concretize, check for duplicates and for correct compilers being used + spack concretize --force --fresh 2>&1 | tee log.concretize.${ENVNAME} ${SPACK_STACK_DIR}/util/show_duplicate_packages.py -i fms -i crtm -i crtm-fix -i esmf -i mapl -i py-cython -i neptune-env -i fms -i ip - - # Add binary cache back in - spack mirror add local-binary file://${BUILD_CACHE_PATH}/ - echo "Packages in combined spack build caches:" - spack buildcache list + spack stack check-preferred-compiler 2>&1 | tee log.check-preferred-compiler.${ENVNAME} # Install from cache spack install --fail-fast --source --no-check-signature 2>&1 | tee log.install.${ENVNAME}.all diff --git a/.github/workflows/ubuntu-ci-x86_64-oneapi.yaml b/.github/workflows/ubuntu-ci-x86_64-oneapi.yaml index e5f4576f7..f6f59d18e 100644 --- a/.github/workflows/ubuntu-ci-x86_64-oneapi.yaml +++ b/.github/workflows/ubuntu-ci-x86_64-oneapi.yaml @@ -45,23 +45,11 @@ jobs: - name: prepare-directories run: | - # DH* REVERT ME AFTER MERGE mkdir -p ${BUILD_CACHE_PATH} mkdir -p ${SOURCE_CACHE_PATH} - name: create-buildcache run: | - # Get day of week to decide whether to use build caches or not - DOW=$(date +%u) - # Monday is 1 ... 
Sunday is 7 - if [[ $DOW == 7 ]]; then - export USE_BINARY_CACHE=false - echo "Ignore existing binary cache for creating buildcache environment" - else - export USE_BINARY_CACHE=true - echo "Use existing binary cache for creating buildcache environment" - fi - # Set up spack-stack source ./setup.sh @@ -134,23 +122,22 @@ jobs: # Don't generate ecflow module when using external package spack config add "modules:default:tcl:exclude:[ecflow]" - # Concretize and check for duplicates + # Concretize, check for duplicates and for correct compilers being used spack concretize --force --fresh 2>&1 | tee log.concretize.${ENVNAME} ${SPACK_STACK_DIR}/util/show_duplicate_packages.py -i fms -i crtm -i crtm-fix -i esmf -i mapl -i py-cython -i neptune-env -i fms -i ip + spack stack check-preferred-compiler 2>&1 | tee log.check-preferred-compiler.${ENVNAME} # Add and update source cache spack mirror add local-source file://${SOURCE_CACHE_PATH}/ spack mirror create -a -d ${SOURCE_CACHE_PATH}/ - # Add binary cache if requested - if [ "$USE_BINARY_CACHE" = true ] ; then - set +e - spack mirror add local-binary file://${BUILD_CACHE_PATH}/ - spack buildcache update-index local-binary - set -e - echo "Packages in spack binary cache:" - spack buildcache list - fi + # Add binary cache + set +e + spack mirror add local-binary file://${BUILD_CACHE_PATH}/ + spack buildcache update-index local-binary + set -e + echo "Packages in spack binary cache:" + spack buildcache list # Break installation up in pieces and create build caches in between # This allows us to "spin up" builds that altogether take longer than @@ -188,11 +175,6 @@ jobs: # Check for libirc.so in shared libraries (returns 1=error if found) ${SPACK_STACK_DIR}/util/check_libirc.sh - # Remove binary cache for next round of concretization - if [ "$USE_BINARY_CACHE" = true ] ; then - spack mirror rm local-binary - fi - # Remove buildcache config settings spack config remove config:install_tree:padded_length @@ -215,14 +197,10 @@ jobs: 
rsync -av --exclude='install' --exclude='spack.lock' --exclude='.spack_db' ${BUILDCACHE_ENVDIR}/ ${ENVDIR}/ spack env activate ${ENVDIR} - # Concretize and check for duplicates - spack concretize --force 2>&1 | tee log.concretize.${ENVNAME} + # Concretize, check for duplicates and for correct compilers being used + spack concretize --force --fresh 2>&1 | tee log.concretize.${ENVNAME} ${SPACK_STACK_DIR}/util/show_duplicate_packages.py -i fms -i crtm -i crtm-fix -i esmf -i mapl -i py-cython -i neptune-env -i fms -i ip - - # Add binary cache back in - spack mirror add local-binary file://${BUILD_CACHE_PATH}/ - echo "Packages in combined spack build caches:" - spack buildcache list + spack stack check-preferred-compiler 2>&1 | tee log.check-preferred-compiler.${ENVNAME} # Install from cache spack install --fail-fast --source --no-check-signature 2>&1 | tee log.install.${ENVNAME}.all diff --git a/configs/common/packages_oneapi.yaml b/configs/common/packages_oneapi.yaml index 93a7efbee..9cd45b082 100644 --- a/configs/common/packages_oneapi.yaml +++ b/configs/common/packages_oneapi.yaml @@ -19,43 +19,46 @@ packages: # Individual package settings antlr: require: - - '%gcc' + - '%c,cxx=gcc' bison: require: - - '%gcc' + - '%c,cxx=gcc' cmake: require: - - '%gcc' + - '%c,cxx=gcc' ectrans: require: - +mkl ~fftw gmake: require: - - '%gcc' + - '%c=gcc' go: require: - '%c,cxx=gcc' libbsd: require: - - '%gcc' + - '%c=gcc' libmd: require: - - '%gcc' + - '%c=gcc' met: require: - +shared-intel nco: require: - - '%gcc' + - '%c,cxx=gcc' parallel-netcdf: require: - +shared-intel py-maturin: require: - - '%gcc' + - '%c=gcc' py-scipy: require: - 'cxxflags="-O1"' qt: require: - - '%gcc' + - '%c,cxx=gcc' + rust: + require: + - '%c,cxx=gcc' diff --git a/spack-ext/lib/jcsda-emc/spack-stack/stack/cmd/stack.py b/spack-ext/lib/jcsda-emc/spack-stack/stack/cmd/stack.py index c26630594..7abd05640 100644 --- a/spack-ext/lib/jcsda-emc/spack-stack/stack/cmd/stack.py +++ 
b/spack-ext/lib/jcsda-emc/spack-stack/stack/cmd/stack.py @@ -8,6 +8,10 @@ setup_meta_modules_parser, stack_setup_meta_modules, ) +from spack.extensions.stack.cmd.stack_cmds.check_preferred_compiler import ( + setup_preferred_compiler_parser, + stack_check_preferred_compiler, +) from spack.extensions.stack.stack_paths import stack_path description = "Create spack-stack environment" @@ -26,10 +30,17 @@ def setup_parser(subparser): sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='stack_command') create_parser = sp.add_parser('create', help='Create spack-stack environment or container.') - meta_modules_parser = sp.add_parser('setup-meta-modules', - help='Create lmod/lua or tcl/tk meta-modules') + meta_modules_parser = sp.add_parser( + 'setup-meta-modules', + help='Create lmod/lua or tcl/tk meta-modules', + ) + preferred_compiler_parser = sp.add_parser( + 'check-preferred-compiler', + help='Check that the preferred compiler is being used', + ) setup_create_parser(create_parser) setup_meta_modules_parser(meta_modules_parser) + setup_preferred_compiler_parser(preferred_compiler_parser) # Main command that calls subcommands @@ -38,3 +49,5 @@ def stack(parser, args): stack_create(parser, args) if args.stack_command == 'setup-meta-modules': stack_setup_meta_modules(parser, args) + if args.stack_command == 'check-preferred-compiler': + stack_check_preferred_compiler(parser, args) diff --git a/spack-ext/lib/jcsda-emc/spack-stack/stack/cmd/stack_cmds/check_preferred_compiler.py b/spack-ext/lib/jcsda-emc/spack-stack/stack/cmd/stack_cmds/check_preferred_compiler.py new file mode 100644 index 000000000..13f4d1120 --- /dev/null +++ b/spack-ext/lib/jcsda-emc/spack-stack/stack/cmd/stack_cmds/check_preferred_compiler.py @@ -0,0 +1,14 @@ +from spack.extensions.stack.compiler_utils import check_preferred_compiler + +description = "Check preferred compiler" +section = "spack-stack" +level = "long" + + +# Add potential arguments to check-preferred-compiler +def 
setup_preferred_compiler_parser(subparser): + pass + + +def stack_check_preferred_compiler(parser, args): + check_preferred_compiler() diff --git a/spack-ext/lib/jcsda-emc/spack-stack/stack/common.py b/spack-ext/lib/jcsda-emc/spack-stack/stack/common.py new file mode 100644 index 000000000..5379715e1 --- /dev/null +++ b/spack-ext/lib/jcsda-emc/spack-stack/stack/common.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python3 + +# Terminal colors +RED = "\033[91m" +GREEN = "\033[92m" +YELLOW = "\033[93m" +BLUE = "\033[94m" +RESET = "\033[0m" + +# Colored log levels +INFO_LABEL = "INFO: " +ERROR_LABEL = "\033[91mERROR:\033[0m " + +# Aliases to shorten module paths for tcl modules. These aliases must match +# the compiler and MPI name translations in configs/common/modules_tcl.yaml +ALIASES = { + "none" : "none", + # Compilers + "gcc" : "gcc", + "intel-oneapi-compilers-classic" : "intel", + "intel-oneapi-compilers" : "oneapi", + "llvm" : "llvm", + # MPI + "cray-mpich" : "cray-mpich", + "intel-oneapi-mpi" : "impi", + "mpich" : "mpich", + "mpt" : "mpt", + "openmpi" : "openmpi", +} + + +def get_preferred_compiler(config): + """Determine the preferred compiler by looking at + packages: + fortran: + prefer: + - COMPILER_NAME (gcc, intel-oneapi-compilers, llvm, ..) + """ + try: + preferred_compilers = config.get("packages")["fortran"]["prefer"] + except: + raise Exception( + """Unable to detect preferred compiler from environment. 
+ Does the environment have the config entry 'packages:fortran:prefer'?""" + ) + if len(preferred_compilers)>1: + raise Exception(f"Invalid value for packages:fortran:prefer is {preferred_compilers}") + preferred_compiler = preferred_compilers[0] + return preferred_compiler diff --git a/spack-ext/lib/jcsda-emc/spack-stack/stack/compiler_utils.py b/spack-ext/lib/jcsda-emc/spack-stack/stack/compiler_utils.py new file mode 100644 index 000000000..f335212f2 --- /dev/null +++ b/spack-ext/lib/jcsda-emc/spack-stack/stack/compiler_utils.py @@ -0,0 +1,155 @@ +#!/usr/bin/env python3 + +import logging +import re + +import spack +import spack.environment as ev +from spack.provider_index import ProviderIndex + +from spack.extensions.stack.common import ALIASES +from spack.extensions.stack.common import RED, RESET +from spack.extensions.stack.common import get_preferred_compiler + + +def get_compiler_name_and_version(string): + compiler_name = string.replace("@=", "@").split("@")[0] + try: + compiler_version = string.replace("@=", "@").split("@")[1] + except: + compiler_version = None + return (compiler_name, compiler_version) + + +def get_compiler_choice(string): + """Parse string for a Spack version 1 compiler dependency + declaration. 
By intentionally not matching old (spack v0) + compiler dependency declarations ("%gcc", "%oneapi", ...), + we force updating the Spack configuration files to v1.""" + COMPILER_CHOICE_REGEX_STRING = "^(%+)(" + \ + "c=|" + \ + "cxx=|" + \ + "fortran=|" + \ + "c,cxx=|" + \ + "cxx,c=|" + \ + "c,fortran=|" + \ + "fortran,c=|" + \ + "cxx,fortran=|" + \ + "fortran,cxx=|" + \ + "c,cxx,fortran=|" + \ + "fortran,c,cxx=|" + \ + "cxx,fortran,c=|" + \ + "c,fortran,cxx=|" + \ + "cxx,c,fortran=|" + \ + "fortran,cxx,c=)(\S+)\s*$" + COMPILER_CHOICE_REGEX = re.compile(COMPILER_CHOICE_REGEX_STRING) + match = COMPILER_CHOICE_REGEX.match(string) + if match: + return match.group(3) + return None + + +def check_preferred_compiler(): + """For an active environment, check that the preferred compiler + is being used for all packages except those that explicitly + request a different compiler. For the latter packages, check + that the explicitly requested compiler is being used.""" + + logging.info("Configuring active spack environment ...") + env_dir = ev.active_environment().path + if not env_dir: + raise Exception("No active spack environment") + env = spack.environment.Environment(env_dir) + spack.environment.environment.activate(env) + logging.info(" ... environment directory: {}".format(env_dir)) + + # Get all specs and determine compilers + specs = env.all_specs() + if not specs: + raise Exception(f"{RED}No specs found - did you run 'spack concretize'?{RESET}") + q = ProviderIndex(specs=specs, repository=spack.repo.PATH) + + c_providers = q.providers_for("c") + cxx_providers = q.providers_for("cxx") + fortran_providers = q.providers_for("fortran") + compilers = list(set(c_providers + cxx_providers + fortran_providers)) + if not compilers: + raise Exception(f"{RED}No compilers found{RESET}!") + logging.info(f" ... 
compilers: {compilers}") + + # Determine the preferred compiler + preferred_compiler = get_preferred_compiler(spack.config) + (preferred_compiler_name, preferred_compiler_version) = get_compiler_name_and_version(preferred_compiler) + logging.info(" ... preferred compiler: {}".format(preferred_compiler)) + + # Get package config to compare actual specs against the intended config + package_config = spack.config.get("packages") + + logging.info("Checking all specs ...") + errors = 0 + for spec in specs: + # If the spec has no compiler dependency, an exception will be thrown - ignore package + try: + compiler_name = spec.compiler.name + compiler_version = spec.compiler.version if preferred_compiler_version else None + except: + logging.info(f" ... {spec.name}@{spec.version}/{spec.dag_hash(length=7)} has no compiler dependency") + continue + # If the spec compiler matches the preferred compiler for the environment, move on. + # Note that this permits situations where a packages has an explicit preferred (but + # not explicitly required) compiler, but Spack decides to use the preferred (and + # different) compiler for the environment instead. + if preferred_compiler_name == compiler_name and preferred_compiler_version == compiler_version: + logging.info(f" ... 
{spec.name}@{spec.version}/{spec.dag_hash(length=7)} uses preferred compiler") + else: + spec_required_compiler_name = None + spec_required_compiler_version = None + spec_preferred_compiler_name = None + spec_preferred_compiler_version = None + for key, value in package_config.get(spec.name, {}).items(): + # To simplify parsing, turn scalar values into a list of length 1 + if isinstance(value, (str, bytes)): + values = [value] + else: + values = value + # Loop through all values to check for required or preferred compilers + for entry in values: + if key.lower() == "require": + choice = get_compiler_choice(entry.lower()) + # Not a compiler preference, carry on + if not choice: + continue + # Check that the explicitly required compiler is a valid (existing) + # compiler for this environment. This requirement may be relaxed in + # the future if we start building compilers in spack environments. + if any(choice in c for c in compilers): + (spec_required_compiler_name, spec_required_compiler_version) = get_compiler_name_and_version(choice) + elif key.lower() == "prefer": + choice = get_compiler_choice(entry.lower()) + # Not a compiler preference, carry on + if not choice: + continue + # Check that the explicitly preferred compiler is a valid (existing) + # compiler for this environment. This requirement may be relaxed in + # the future if we start building compilers in spack environments. + if any(choice in c for c in compilers): + (spec_preferred_compiler_name, spec_preferred_compiler_version) = get_compiler_name_and_version(choice) + # If we have a hard requirement for a compiler, we can stop scanning the spec package config + if spec_required_compiler_name: + break + if spec_required_compiler_name == compiler_name and \ + ( (not spec_required_compiler_version or not compiler_version) or \ + (spec_required_compiler_version==compiler_version) ): + logging.info(f" ... 
{spec.name}@{spec.version}/{spec.dag_hash(length=7)} uses explicitly required compiler") + elif spec_preferred_compiler_name == compiler_name and \ + ( (not spec_preferred_compiler_version or not compiler_version) or \ + (spec_preferred_compiler_version==compiler_version) ): + logging.info(f" ... {spec.name}@{spec.version}/{spec.dag_hash(length=7)} uses explicitly preferred compiler") + else: + errors += 1 + logging.error(f" ... {RED}error: {spec.name}@{spec.version}/{spec.dag_hash(length=7)} does not use intended compiler\n" + \ + f" check also that any explicit preferred/required compiler dependencies are using Spack v1 syntax{RESET}") + if errors==1: + raise Exception(f"{RED}Detected {errors} compiler mismatch!{RESET}") + elif errors: + raise Exception(f"{RED}Detected {errors} compiler mismatches!{RESET}") diff --git a/spack-ext/lib/jcsda-emc/spack-stack/stack/meta_modules.py b/spack-ext/lib/jcsda-emc/spack-stack/stack/meta_modules.py index 0411dec69..a31fb1d92 100755 --- a/spack-ext/lib/jcsda-emc/spack-stack/stack/meta_modules.py +++ b/spack-ext/lib/jcsda-emc/spack-stack/stack/meta_modules.py @@ -8,9 +8,11 @@ import spack import spack.environment as ev -# import spack.repo from spack.provider_index import ProviderIndex +from spack.extensions.stack.common import ALIASES +from spack.extensions.stack.common import get_preferred_compiler + # logging.basicConfig(level=logging.INFO) logging.basicConfig(format="%(message)s", level=logging.DEBUG) @@ -52,24 +54,6 @@ "MPIROOT": "", } -# Aliases to shorten module paths for tcl modules. These aliases must match -# the compiler and MPI name translations in configs/common/modules_tcl.yaml -ALIASES = { - "none" : "none", - # Compilers - "gcc" : "gcc", - "intel-oneapi-compilers-classic" : "intel", - "intel-oneapi-compilers" : "oneapi", - "llvm" : "llvm", - # MPI - "cray-mpich" : "cray-mpich", - # Do we still need intel-mpi, and if yes, use the same impi? 
- "intel-oneapi-mpi" : "impi", - "mpich" : "mpich", - "mpt" : "mpt", - "openmpi" : "openmpi", -} - def setenv_command(module_choice, key, value): if module_choice == "lmod": @@ -172,26 +156,6 @@ def substitute_config_vars(config_str): return config_str -def get_preferred_compiler(): - """Determine the preferred compiler by looking at - packages: - fortran: - prefer: - - COMPILER_NAME (gcc, intel-oneapi-compilers, llvm, ..) - """ - try: - preferred_compilers = spack.config.get("packages")["fortran"]["prefer"] - except: - raise Exception( - """Unable to detect preferred compiler from environment. - Does the environment have the config entry 'packages:fortran:prefer?'""" - ) - if len(preferred_compilers)>1: - raise Exception(f"Invalid value for packages:fortran:prefer is {preferred_compilers}") - preferred_compiler = preferred_compilers[0] - return preferred_compiler - - def remove_compiler_prefices_from_tcl_modulefiles(modulepath, compiler_list, mpi_provider, module_choice): """Remove compiler and mpi prefices from tcl modulefiles in modulepath""" logging.info(f" ... ... removing compiler/mpi prefices from tcl modulefiles in {modulepath}") @@ -243,7 +207,12 @@ def remove_compiler_prefices_from_tcl_modulefiles(modulepath, compiler_list, mpi def setup_meta_modules(): - # Find currently active spack environment, activate here + """For an active environment, create meta-modules for the preferred + compiler and the MPI provider (compiled with the preferred compiler). + For tcl/tk environment modules, remove modulepath prefices from the + spack-generated modules and implement a module hiearchy modeled after + the lua/lmod modules.""" + logging.info("Configuring active spack environment ...") env_dir = ev.active_environment().path if not env_dir: @@ -282,8 +251,7 @@ def setup_meta_modules(): logging.info(f" ... 
module directory: {module_dir}") # Get all specs and determine compilers - hashes = env.all_hashes() - specs = spack.store.STORE.db.query(hashes=hashes) + specs = env.all_specs() q = ProviderIndex(specs=specs, repository=spack.repo.PATH) c_providers = q.providers_for("c") @@ -320,7 +288,7 @@ def setup_meta_modules(): # takes it and adds it to the stack-COMPILER metamodule. Likewise, we need # to save the list of compiler substitutions from the preferred compiler # so that we have access to it when we build the MPI meta module. - preferred_compiler = get_preferred_compiler() + preferred_compiler = get_preferred_compiler(spack.config) logging.info(" ... preferred compiler: {}".format(preferred_compiler)) # Sort the list using a custom key @@ -330,7 +298,7 @@ def custom_sort_key(entry): return (1 if preferred_compiler in entry else 0, entry) compilers = sorted(compilers, key=custom_sort_key) - # Get mpi providers (currently only one mpi provider is supported) + # Get mpi providers (currently only one mpi provider is supported) mpi_providers = q.providers_for("mpi") if len(mpi_providers)>1: raise Exception(f"Expected no or one MPI provider, but got {mpi_providers}") diff --git a/spack-ext/lib/jcsda-emc/spack-stack/tests/test_check_preferred_compiler.py b/spack-ext/lib/jcsda-emc/spack-stack/tests/test_check_preferred_compiler.py new file mode 100644 index 000000000..6980d431c --- /dev/null +++ b/spack-ext/lib/jcsda-emc/spack-stack/tests/test_check_preferred_compiler.py @@ -0,0 +1,107 @@ +import shutil +import os + +import pytest + +import spack +import spack.environment as ev +from spack.llnl.util.filesystem import filter_file +import spack.main + +spack_stack_cmd = spack.main.SpackCommand("stack") + + +# Find spack-stack directory assuming this Spack instance +# is a submodule of spack-stack. 
+def stack_path(*paths): + stack_dir = os.path.dirname(spack.paths.spack_root) + + if not os.path.exists(os.path.join(stack_dir, ".spackstack")): + return None + + return os.path.join(stack_dir, *paths) + + +test_dir = stack_path("envs", "unit-tests", "check-preferred-compiler") + + +@pytest.mark.extension("stack") +@pytest.mark.filterwarnings("ignore::UserWarning") +def test_check_preferred_compiler(): + if not test_dir: + return + + os.makedirs(test_dir, exist_ok=True) + env_root_dir = os.path.join(test_dir) + + env_name = "test1" + env_dir = os.path.join(env_root_dir, env_name) + module_dir = os.path.join(env_dir, "modules") + if os.path.exists(env_dir): + shutil.rmtree(env_dir) + + spack_stack_cmd("create", "env", "--dir", env_root_dir, "--name", env_name, "--compiler", "clang") + + # Create empty env + env = ev.Environment(manifest_dir=env_dir) + ev.activate(env) + + packages_definition = """ +packages: + gcc: + externals: + - spec: gcc@11.5.0 languages:='c,c++,fortran' + prefix: /usr + extra_attributes: + compilers: + c: /usr/bin/gcc + cxx: /usr/bin/g++ + fortran: /usr/bin/gfortran + llvm: + externals: + - spec: llvm@21.1.0 + prefix: /usr + extra_attributes: + compilers: + c: /usr/bin/clang + cxx: /usr/bin/clang++ + fortran: /usr/bin/flang-new + mpi: + buildable: false + openmpi: + externals: + - spec: openmpi@5.0.8 ~internal-hwloc +two_level_namespace + prefix: /usr + zlib: + prefer: + - '%c=gcc' + libszip: + require: + - '%c=gcc' +""" + site_packages_yaml = os.path.join(env_dir, "site", "packages.yaml") + if os.path.exists(site_packages_yaml): + raise Exception(f"Not implemented: appending to existing {site_packages_yaml}") + with open(site_packages_yaml, 'w') as f: + f.write(packages_definition) + + cmd = spack.main.SpackCommand("add") + cmd("gcc", "openmpi", "zlib", "libszip") + + cmd = spack.main.SpackCommand("concretize") + cmd("--force", "--fresh") + + spack_stack_cmd("check-preferred-compiler") + + filter_file("%c=gcc", "%c=llvm", site_packages_yaml, 
string=True) + + with pytest.raises(Exception) as error: + spack_stack_cmd("check-preferred-compiler") + # There is only one compiler mismatch for libszip. + # The concretizer chooses to use LLVM for zlib because + # of the general compiler preference (common/packages.yaml), + # i.e. it ignores the zlib-specific preference. The tool + # check-preferred-compiler correctly reports a violation + # of the hard requirement for libszip, but allows zlib + # to be built with the default compiler instead of the soft req. + assert "Detected 1 compiler mismatch" in str(error) diff --git a/spack-ext/lib/jcsda-emc/spack-stack/tests/test_setup_meta_modules.py b/spack-ext/lib/jcsda-emc/spack-stack/tests/test_setup_meta_modules.py index 3f3360b8e..ba0af2c20 100644 --- a/spack-ext/lib/jcsda-emc/spack-stack/tests/test_setup_meta_modules.py +++ b/spack-ext/lib/jcsda-emc/spack-stack/tests/test_setup_meta_modules.py @@ -33,7 +33,7 @@ def test_setup_meta_modules(): os.makedirs(test_dir, exist_ok=True) env_root_dir = os.path.join(test_dir) - env_name = "modtest1" + env_name = "test1" env_dir = os.path.join(env_root_dir, env_name) module_dir = os.path.join(env_dir, "modules") if os.path.exists(env_dir):