diff --git a/.gitignore b/.gitignore index c8b95e4482..593345db93 100644 --- a/.gitignore +++ b/.gitignore @@ -9,6 +9,7 @@ build/ dist/ *egg-info/ *.swp +.mypy_cache/ Dockerfile.* Singularity.* diff --git a/easybuild/framework/easyblock.py b/easybuild/framework/easyblock.py index 1aba3187d2..5cdfbae2f5 100644 --- a/easybuild/framework/easyblock.py +++ b/easybuild/framework/easyblock.py @@ -71,12 +71,12 @@ from easybuild.tools.config import install_path, log_path, package_path, source_paths from easybuild.tools.environment import restore_env, sanitize_env from easybuild.tools.filetools import CHECKSUM_TYPE_MD5, CHECKSUM_TYPE_SHA256 -from easybuild.tools.filetools import adjust_permissions, apply_patch, back_up_file -from easybuild.tools.filetools import change_dir, convert_name, compute_checksum, copy_file, derive_alt_pypi_url -from easybuild.tools.filetools import diff_files, download_file, encode_class_name, extract_file +from easybuild.tools.filetools import adjust_permissions, apply_patch, back_up_file, change_dir, convert_name +from easybuild.tools.filetools import compute_checksum, copy_file, check_lock, create_lock, derive_alt_pypi_url +from easybuild.tools.filetools import diff_files, dir_contains_files, download_file, encode_class_name, extract_file from easybuild.tools.filetools import find_backup_name_candidate, get_source_tarball_from_git, is_alt_pypi_url from easybuild.tools.filetools import is_binary, is_sha256_checksum, mkdir, move_file, move_logs, read_file, remove_dir -from easybuild.tools.filetools import remove_file, verify_checksum, weld_paths, write_file, dir_contains_files +from easybuild.tools.filetools import remove_file, remove_lock, verify_checksum, weld_paths, write_file from easybuild.tools.hooks import BUILD_STEP, CLEANUP_STEP, CONFIGURE_STEP, EXTENSIONS_STEP, FETCH_STEP, INSTALL_STEP from easybuild.tools.hooks import MODULE_STEP, PACKAGE_STEP, PATCH_STEP, PERMISSIONS_STEP, POSTITER_STEP, POSTPROC_STEP from easybuild.tools.hooks import PREPARE_STEP, READY_STEP, SANITYCHECK_STEP, SOURCE_STEP, TEST_STEP, TESTCASES_STEP @@ -1014,6 +1014,25 @@ def make_devel_module(self, create_in_builddir=False): # cleanup: unload fake module, remove fake module dir self.clean_up_fake_module(fake_mod_data) + def make_module_deppaths(self): + """ + Add specific 'module use' actions to module file, in order to find + dependencies outside the end user's MODULEPATH. + """ + deppaths = self.cfg['moddependpaths'] + if not deppaths: + return '' + elif not isinstance(deppaths, (str, list, tuple)): + raise EasyBuildError("moddependpaths value %s (type: %s) is not a string, list or tuple", + deppaths, type(deppaths)) + + if isinstance(deppaths, str): + txt = self.module_generator.use([deppaths], guarded=True) + else: + txt = self.module_generator.use(deppaths, guarded=True) + + return txt + def make_module_dep(self, unload_info=None): """ Make the dependencies for the module file. 
@@ -1194,7 +1213,8 @@ def make_module_extra_extensions(self): lines = [self.module_extra_extensions] # set environment variable that specifies list of extensions - exts_list = ','.join(['%s-%s' % (ext[0], ext[1]) for ext in self.cfg['exts_list']]) + # We need only name and version, so don't resolve templates + exts_list = ','.join(['-'.join(ext[:2]) for ext in self.cfg.get_ref('exts_list')]) env_var_name = convert_name(self.name, upper=True) lines.append(self.module_generator.set_environment('EBEXTSLIST%s' % env_var_name, exts_list)) @@ -1207,7 +1227,7 @@ def make_module_footer(self): footer = [self.module_generator.comment("Built with EasyBuild version %s" % VERBOSE_VERSION)] # add extra stuff for extensions (if any) - if self.cfg['exts_list']: + if self.cfg.get_ref('exts_list'): footer.append(self.make_module_extra_extensions()) # include modules footer if one is specified @@ -1791,7 +1811,7 @@ def fetch_step(self, skip_checksums=False): trace_msg(msg) # fetch extensions - if self.cfg['exts_list']: + if self.cfg.get_ref('exts_list'): self.exts = self.fetch_extension_sources(skip_checksums=skip_checksums) # create parent dirs in install and modules path already @@ -1911,7 +1931,9 @@ def extract_step(self): """ for src in self.src: self.log.info("Unpacking source %s" % src['name']) - srcdir = extract_file(src['path'], self.builddir, cmd=src['cmd'], extra_options=self.cfg['unpack_options']) + srcdir = extract_file(src['path'], self.builddir, cmd=src['cmd'], + extra_options=self.cfg['unpack_options'], change_into_dir=False) + change_dir(srcdir) if srcdir: self.src[self.src.index(src)]['finalpath'] = srcdir else: @@ -2063,7 +2085,7 @@ def extensions_step(self, fetch=False): - find source for extensions, in 'extensions' (and 'packages' for legacy reasons) - run extra_extensions """ - if len(self.cfg['exts_list']) == 0: + if not self.cfg.get_ref('exts_list'): self.log.debug("No extensions in exts_list") return @@ -2409,37 +2431,71 @@ def _sanity_check_step_common(self, custom_paths, custom_commands): SANITY_CHECK_PATHS_DIRS: ("(non-empty) directory", lambda dp: os.path.isdir(dp) and os.listdir(dp)), } - # prepare sanity check paths - paths = self.cfg['sanity_check_paths'] - if not paths: + enhance_sanity_check = self.cfg['enhance_sanity_check'] + ec_commands = self.cfg['sanity_check_commands'] + ec_paths = self.cfg['sanity_check_paths'] + + # if enhance_sanity_check is not enabled, only sanity_check_paths specified in the easyconfig file are used, + # the ones provided by the easyblock (via custom_paths) are ignored + if ec_paths and not enhance_sanity_check: + paths = ec_paths + self.log.info("Using (only) sanity check paths specified by easyconfig file: %s", paths) + else: + # if no sanity_check_paths are specified in easyconfig, + # we fall back to the ones provided by the easyblock via custom_paths if custom_paths: paths = custom_paths - self.log.info("Using customized sanity check paths: %s" % paths) + self.log.info("Using customized sanity check paths: %s", paths) + # if custom_paths is empty, we fall back to a generic set of paths: + # non-empty bin/ + /lib or /lib64 directories else: paths = {} for key in path_keys_and_check: paths.setdefault(key, []) paths.update({SANITY_CHECK_PATHS_DIRS: ['bin', ('lib', 'lib64')]}) - self.log.info("Using default sanity check paths: %s" % paths) + self.log.info("Using default sanity check paths: %s", paths) + + # if enhance_sanity_check is enabled *and* sanity_check_paths are specified in the easyconfig, + # those paths are used to enhance the paths 
provided by the easyblock + if enhance_sanity_check and ec_paths: + for key in ec_paths: + val = ec_paths[key] + if isinstance(val, list): + paths[key] = paths.get(key, []) + val + else: + error_pattern = "Incorrect value type in sanity_check_paths, should be a list: " + error_pattern += "%s (type: %s)" % (val, type(val)) + raise EasyBuildError(error_pattern) + self.log.info("Enhanced sanity check paths after taking into account easyconfig file: %s", paths) + + sorted_keys = sorted(paths.keys()) + known_keys = sorted(path_keys_and_check.keys()) + + # verify sanity_check_paths value: only known keys, correct value types, at least one non-empty value + only_list_values = all(isinstance(x, list) for x in paths.values()) + only_empty_lists = all(not x for x in paths.values()) + if sorted_keys != known_keys or not only_list_values or only_empty_lists: + error_msg = "Incorrect format for sanity_check_paths: should (only) have %s keys, " + error_msg += "values should be lists (at least one non-empty)." + raise EasyBuildError(error_msg % ', '.join("'%s'" % k for k in known_keys)) + + # if enhance_sanity_check is not enabled, only sanity_check_commands specified in the easyconfig file are used, + # the ones provided by the easyblock (via custom_commands) are ignored + if ec_commands and not enhance_sanity_check: + commands = ec_commands + self.log.info("Using (only) sanity check commands specified by easyconfig file: %s", commands) else: - self.log.info("Using specified sanity check paths: %s" % paths) - - ks = sorted(paths.keys()) - valnottypes = [not isinstance(x, list) for x in paths.values()] - lenvals = [len(x) for x in paths.values()] - req_keys = sorted(path_keys_and_check.keys()) - if not ks == req_keys or sum(valnottypes) > 0 or sum(lenvals) == 0: - raise EasyBuildError("Incorrect format for sanity_check_paths (should (only) have %s keys, " - "values should be lists (at least one non-empty)).", ','.join(req_keys)) - - commands = self.cfg['sanity_check_commands'] - if not commands: if custom_commands: commands = custom_commands - self.log.info("Using customised sanity check commands: %s" % commands) + self.log.info("Using customised sanity check commands: %s", commands) else: commands = [] - self.log.info("Using specified sanity check commands: %s" % commands) + + # if enhance_sanity_check is enabled, the sanity_check_commands specified in the easyconfig file + # are combined with those provided by the easyblock via custom_commands + if enhance_sanity_check and ec_commands: + commands = commands + ec_commands + self.log.info("Enhanced sanity check commands after taking into account easyconfig file: %s", commands) for i, command in enumerate(commands): # set command to default. 
This allows for config files with @@ -2475,9 +2531,17 @@ def _sanity_check_step_dry_run(self, custom_paths=None, custom_commands=None, ** """ paths, path_keys_and_check, commands = self._sanity_check_step_common(custom_paths, custom_commands) - for key, (typ, _) in path_keys_and_check.items(): + for key in [SANITY_CHECK_PATHS_FILES, SANITY_CHECK_PATHS_DIRS]: + (typ, _) = path_keys_and_check[key] self.dry_run_msg("Sanity check paths - %s ['%s']", typ, key) - if paths[key]: + entries = paths[key] + if entries: + # some entries may be tuple values, + # we need to convert them to strings first so we can print them sorted + for idx, entry in enumerate(entries): + if isinstance(entry, tuple): + entries[idx] = ' or '.join(entry) + for path in sorted(paths[key]): self.dry_run_msg(" * %s", str(path)) else: @@ -2608,6 +2672,9 @@ def xs2str(xs): # run sanity check commands for command in commands: + + trace_msg("running command '%s' ..." % command) + out, ec = run_cmd(command, simple=False, log_ok=False, log_all=False, trace=False) if ec != 0: fail_msg = "sanity check command %s exited with code %s (output: %s)" % (command, ec, out) @@ -2616,7 +2683,7 @@ def xs2str(xs): else: self.log.info("sanity check command %s ran successfully! (output: %s)" % (command, out)) - trace_msg("running command '%s': %s" % (command, ('FAILED', 'OK')[ec == 0])) + trace_msg("result for command '%s': %s" % (command, ('FAILED', 'OK')[ec == 0])) # also run sanity check for extensions (unless we are an extension ourselves) if not extension: @@ -2723,6 +2790,7 @@ def make_module_step(self, fake=False): txt += self.make_module_description() txt += self.make_module_group_check() + txt += self.make_module_deppaths() txt += self.make_module_dep() txt += self.make_module_extend_modpath() txt += self.make_module_req() @@ -3049,30 +3117,14 @@ def run_all_steps(self, run_test_cases): if ignore_locks: self.log.info("Ignoring locks...") else: - locks_dir = build_option('locks_dir') or os.path.join(install_path('software'), '.locks') - lock_path = os.path.join(locks_dir, '%s.lock' % self.installdir.replace('/', '_')) - - # if lock already exists, either abort or wait until it disappears - if os.path.exists(lock_path): - wait_on_lock = build_option('wait_on_lock') - if wait_on_lock: - while os.path.exists(lock_path): - print_msg("lock %s exists, waiting %d seconds..." 
% (lock_path, wait_on_lock), - silent=self.silent) - time.sleep(wait_on_lock) - else: - raise EasyBuildError("Lock %s already exists, aborting!", lock_path) + lock_name = self.installdir.replace('/', '_') - # create lock to avoid that another installation running in parallel messes things up; - # we use a directory as a lock, since that's atomically created - try: - mkdir(lock_path, parents=True) - except EasyBuildError as err: - # clean up the error message a bit, get rid of the "Failed to create directory" part + quotes - stripped_err = str(err).split(':', 1)[1].strip().replace("'", '').replace('"', '') - raise EasyBuildError("Failed to create lock %s: %s", lock_path, stripped_err) + # check if lock already exists; + # either aborts with an error or waits until it disappears (depends on --wait-on-lock) + check_lock(lock_name) - self.log.info("Lock created: %s", lock_path) + # create lock to avoid that another installation running in parallel messes things up + create_lock(lock_name) try: for (step_name, descr, step_methods, skippable) in steps: @@ -3090,8 +3142,7 @@ def run_all_steps(self, run_test_cases): pass finally: if not ignore_locks: - remove_dir(lock_path) - self.log.info("Lock removed: %s", lock_path) + remove_lock(lock_name) # return True for successfull build (or stopped build) return True diff --git a/easybuild/framework/easyconfig/constants.py b/easybuild/framework/easyconfig/constants.py index 05fcb80d97..3bbc2cba91 100644 --- a/easybuild/framework/easyconfig/constants.py +++ b/easybuild/framework/easyconfig/constants.py @@ -51,4 +51,13 @@ 'OS_VERSION': (get_os_version(), "System version"), 'SYS_PYTHON_VERSION': (platform.python_version(), "System Python version (platform.python_version())"), 'SYSTEM': ({'name': 'system', 'version': 'system'}, "System toolchain"), + + 'OS_PKG_IBVERBS_DEV': (('libibverbs-dev', 'libibverbs-devel', 'rdma-core-devel'), + "OS packages providing ibverbs/infiniband development support"), + 'OS_PKG_OPENSSL_BIN': (('openssl'), + "OS packages providing the openSSL binary"), + 'OS_PKG_OPENSSL_LIB': (('libssl', 'libopenssl'), + "OS packages providing openSSL libraries"), + 'OS_PKG_OPENSSL_DEV': (('openssl-devel', 'libssl-dev', 'libopenssl-devel'), + "OS packages providing openSSL development support"), } diff --git a/easybuild/framework/easyconfig/default.py b/easybuild/framework/easyconfig/default.py index 920e27d1bd..904a7d7bd7 100644 --- a/easybuild/framework/easyconfig/default.py +++ b/easybuild/framework/easyconfig/default.py @@ -90,6 +90,8 @@ 'easyblock': [None, "EasyBlock to use for building; if set to None, an easyblock is selected " "based on the software name", BUILD], 'easybuild_version': [None, "EasyBuild-version this spec-file was written for", BUILD], + 'enhance_sanity_check': [False, "Indicate that additional sanity check commands & paths should enhance " + "the existing sanity check, not replace it", BUILD], 'fix_perl_shebang_for': [None, "List of files for which Perl shebang should be fixed " "to '#!/usr/bin/env perl' (glob patterns supported)", BUILD], 'fix_python_shebang_for': [None, "List of files for which Python shebang should be fixed " @@ -152,6 +154,7 @@ 'multi_deps': [{}, "Dict of lists of dependency versions over which to iterate", DEPENDENCIES], 'multi_deps_load_default': [True, "Load module for first version listed in multi_deps by default", DEPENDENCIES], 'osdependencies': [[], "OS dependencies that should be present on the system", DEPENDENCIES], + 'moddependpaths': [None, "Absolute path(s) to prepend to MODULEPATH before
loading dependencies", DEPENDENCIES], # LICENSE easyconfig parameters 'group': [None, "Name of the user group for which the software should be available; " diff --git a/easybuild/framework/easyconfig/easyconfig.py b/easybuild/framework/easyconfig/easyconfig.py index 3faeb84b90..d79eed817b 100644 --- a/easybuild/framework/easyconfig/easyconfig.py +++ b/easybuild/framework/easyconfig/easyconfig.py @@ -45,6 +45,7 @@ import os import re from distutils.version import LooseVersion +from contextlib import contextmanager import easybuild.tools.filetools as filetools from easybuild.base import fancylogger @@ -383,6 +384,23 @@ def get_toolchain_hierarchy(parent_toolchain, incl_capabilities=False): return toolchain_hierarchy +@contextmanager +def disable_templating(ec): + """Temporarily disable templating on the given EasyConfig + + Usage: + with disable_templating(ec): + # Do what you want without templating + # Templating set to previous value + """ + old_enable_templating = ec.enable_templating + ec.enable_templating = False + try: + yield old_enable_templating + finally: + ec.enable_templating = old_enable_templating + + class EasyConfig(object): """ Class which handles loading, reading, validation of easyconfigs @@ -592,18 +610,15 @@ def set_keys(self, params): """ # disable templating when setting easyconfig parameters # required to avoid problems with values that need more parsing to be done (e.g. dependencies) - prev_enable_templating = self.enable_templating - self.enable_templating = False - - for key in sorted(params.keys()): - # validations are skipped, just set in the config - if key in self._config.keys(): - self[key] = params[key] - self.log.info("setting easyconfig parameter %s: value %s (type: %s)", key, self[key], type(self[key])) - else: - raise EasyBuildError("Unknown easyconfig parameter: %s (value '%s')", key, params[key]) - - self.enable_templating = prev_enable_templating + with disable_templating(self): + for key in sorted(params.keys()): + # validations are skipped, just set in the config + if key in self._config.keys(): + self[key] = params[key] + self.log.info("setting easyconfig parameter %s: value %s (type: %s)", + key, self[key], type(self[key])) + else: + raise EasyBuildError("Unknown easyconfig parameter: %s (value '%s')", key, params[key]) def parse(self): """ @@ -647,42 +662,39 @@ def parse(self): # templating is disabled when parse_hook is called to allow for easy updating of mutable easyconfig parameters # (see also comment in resolve_template) - prev_enable_templating = self.enable_templating - self.enable_templating = False - - # if any lists of dependency versions are specified over which we should iterate, - # deal with them now, before calling parse hook, parsing of dependencies & iterative easyconfig parameters... - self.handle_multi_deps() - - parse_hook_msg = None - if self.path: - parse_hook_msg = "Running %s hook for %s..." % (PARSE, os.path.basename(self.path)) - - # trigger parse hook - hooks = load_hooks(build_option('hooks')) - run_hook(PARSE, hooks, args=[self], msg=parse_hook_msg) - - # parse dependency specifications - # it's important that templating is still disabled at this stage! - self.log.info("Parsing dependency specifications...") - self['dependencies'] = [self._parse_dependency(dep) for dep in self['dependencies']] - self['hiddendependencies'] = [self._parse_dependency(dep, hidden=True) for dep in self['hiddendependencies']] - - # need to take into account that builddependencies may need to be iterated over, - # i.e. 
when the value is a list of lists of tuples - builddeps = self['builddependencies'] - if builddeps and all(isinstance(x, (list, tuple)) for b in builddeps for x in b): - self.iterate_options.append('builddependencies') - builddeps = [[self._parse_dependency(dep, build_only=True) for dep in x] for x in builddeps] - else: - builddeps = [self._parse_dependency(dep, build_only=True) for dep in builddeps] - self['builddependencies'] = builddeps + with disable_templating(self): + # if any lists of dependency versions are specified over which we should iterate, + # deal with them now, before calling parse hook, parsing of dependencies & iterative easyconfig parameters + self.handle_multi_deps() - # keep track of parsed multi deps, they'll come in handy during sanity check & module steps... - self.multi_deps = self.get_parsed_multi_deps() + parse_hook_msg = None + if self.path: + parse_hook_msg = "Running %s hook for %s..." % (PARSE, os.path.basename(self.path)) + + # trigger parse hook + hooks = load_hooks(build_option('hooks')) + run_hook(PARSE, hooks, args=[self], msg=parse_hook_msg) + + # parse dependency specifications + # it's important that templating is still disabled at this stage! + self.log.info("Parsing dependency specifications...") + self['dependencies'] = [self._parse_dependency(dep) for dep in self['dependencies']] + self['hiddendependencies'] = [ + self._parse_dependency(dep, hidden=True) for dep in self['hiddendependencies'] + ] + + # need to take into account that builddependencies may need to be iterated over, + # i.e. when the value is a list of lists of tuples + builddeps = self['builddependencies'] + if builddeps and all(isinstance(x, (list, tuple)) for b in builddeps for x in b): + self.iterate_options.append('builddependencies') + builddeps = [[self._parse_dependency(dep, build_only=True) for dep in x] for x in builddeps] + else: + builddeps = [self._parse_dependency(dep, build_only=True) for dep in builddeps] + self['builddependencies'] = builddeps - # restore templating - self.enable_templating = prev_enable_templating + # keep track of parsed multi deps, they'll come in handy during sanity check & module steps... 
+ self.multi_deps = self.get_parsed_multi_deps() # update templating dictionary self.generate_template_values() @@ -1108,63 +1120,57 @@ def dump(self, fp, always_overwrite=True, backup=False, explicit_toolchains=Fals :param always_overwrite: overwrite existing file at specified location without use of --force :param backup: create backup of existing file before overwriting it """ - orig_enable_templating = self.enable_templating - # templated values should be dumped unresolved - self.enable_templating = False - - # build dict of default values - default_values = dict([(key, DEFAULT_CONFIG[key][0]) for key in DEFAULT_CONFIG]) - default_values.update(dict([(key, self.extra_options[key][0]) for key in self.extra_options])) + with disable_templating(self): + # build dict of default values + default_values = dict([(key, DEFAULT_CONFIG[key][0]) for key in DEFAULT_CONFIG]) + default_values.update(dict([(key, self.extra_options[key][0]) for key in self.extra_options])) + + self.generate_template_values() + templ_const = dict([(quote_py_str(const[1]), const[0]) for const in TEMPLATE_CONSTANTS]) + + # create reverse map of templates, to inject template values where possible + # longer template values are considered first, shorter template keys get preference over longer ones + sorted_keys = sorted(self.template_values, key=lambda k: (len(self.template_values[k]), -len(k)), + reverse=True) + templ_val = OrderedDict([]) + for key in sorted_keys: + # shortest template 'key' is retained in case of duplicates + # ('namelower' is preferred over 'github_account') + # only template values longer than 2 characters are retained + if self.template_values[key] not in templ_val and len(self.template_values[key]) > 2: + templ_val[self.template_values[key]] = key + + toolchain_hierarchy = None + if not explicit_toolchains: + try: + toolchain_hierarchy = get_toolchain_hierarchy(self['toolchain']) + except EasyBuildError as err: + # don't fail hard just because we can't get the hierarchy + self.log.warning('Could not generate toolchain hierarchy for %s to use in easyconfig dump method, ' + 'error:\n%s', self['toolchain'], str(err)) - self.generate_template_values() - templ_const = dict([(quote_py_str(const[1]), const[0]) for const in TEMPLATE_CONSTANTS]) - - # create reverse map of templates, to inject template values where possible - # longer template values are considered first, shorter template keys get preference over longer ones - sorted_keys = sorted(self.template_values, key=lambda k: (len(self.template_values[k]), -len(k)), reverse=True) - templ_val = OrderedDict([]) - for key in sorted_keys: - # shortest template 'key' is retained in case of duplicates ('namelower' is preferred over 'github_account') - # only template values longer than 2 characters are retained - if self.template_values[key] not in templ_val and len(self.template_values[key]) > 2: - templ_val[self.template_values[key]] = key - - toolchain_hierarchy = None - if not explicit_toolchains: try: - toolchain_hierarchy = get_toolchain_hierarchy(self['toolchain']) - except EasyBuildError as err: - # don't fail hard just because we can't get the hierarchy - self.log.warning('Could not generate toolchain hierarchy for %s to use in easyconfig dump method, ' - 'error:\n%s', self['toolchain'], str(err)) + ectxt = self.parser.dump(self, default_values, templ_const, templ_val, + toolchain_hierarchy=toolchain_hierarchy) + except NotImplementedError as err: + raise NotImplementedError(err) - try: - ectxt = self.parser.dump(self, default_values, templ_const, 
templ_val, - toolchain_hierarchy=toolchain_hierarchy) - except NotImplementedError as err: - # need to restore enable_templating value in case this method is caught in a try/except block and ignored - # (the ability to dump is not a hard requirement for build success) - self.enable_templating = orig_enable_templating - raise NotImplementedError(err) + self.log.debug("Dumped easyconfig: %s", ectxt) - self.log.debug("Dumped easyconfig: %s", ectxt) + if build_option('dump_autopep8'): + autopep8_opts = { + 'aggressive': 1, # enable non-whitespace changes, but don't be too aggressive + 'max_line_length': 120, + } + self.log.info("Reformatting dumped easyconfig using autopep8 (options: %s)", autopep8_opts) + ectxt = autopep8.fix_code(ectxt, options=autopep8_opts) + self.log.debug("Dumped easyconfig after autopep8 reformatting: %s", ectxt) - if build_option('dump_autopep8'): - autopep8_opts = { - 'aggressive': 1, # enable non-whitespace changes, but don't be too aggressive - 'max_line_length': 120, - } - self.log.info("Reformatting dumped easyconfig using autopep8 (options: %s)", autopep8_opts) - ectxt = autopep8.fix_code(ectxt, options=autopep8_opts) - self.log.debug("Dumped easyconfig after autopep8 reformatting: %s", ectxt) + if not ectxt.endswith('\n'): + ectxt += '\n' - if not ectxt.endswith('\n'): - ectxt += '\n' - - write_file(fp, ectxt, always_overwrite=always_overwrite, backup=backup, verbose=backup) - - self.enable_templating = orig_enable_templating + write_file(fp, ectxt, always_overwrite=always_overwrite, backup=backup, verbose=backup) def _validate(self, attr, values): # private method """ @@ -1473,7 +1479,7 @@ def _parse_dependency(self, dep, hidden=False, build_only=False): # (true) boolean value simply indicates that a system toolchain is used elif isinstance(tc_spec, bool) and tc_spec: - tc = {'name': SYSTEM_TOOLCHAIN_NAME, 'version': ''} + tc = {'name': SYSTEM_TOOLCHAIN_NAME, 'version': ''} # two-element list/tuple value indicates custom toolchain specification elif isinstance(tc_spec, (list, tuple,)): @@ -1593,17 +1599,12 @@ def _generate_template_values(self, ignore=None): # step 1-3 work with easyconfig.templates constants # disable templating with creating dict with template values to avoid looping back to here via __getitem__ - prev_enable_templating = self.enable_templating - - self.enable_templating = False - - if self.template_values is None: - # if no template values are set yet, initiate with a minimal set of template values; - # this is important for easyconfig that use %(version_minor)s to define 'toolchain', - # which is a pretty weird use case, but fine... - self.template_values = template_constant_dict(self, ignore=ignore) - - self.enable_templating = prev_enable_templating + with disable_templating(self): + if self.template_values is None: + # if no template values are set yet, initiate with a minimal set of template values; + # this is important for easyconfig that use %(version_minor)s to define 'toolchain', + # which is a pretty weird use case, but fine... 
+ self.template_values = template_constant_dict(self, ignore=ignore) # grab toolchain instance with templating support enabled, # which is important in case the Toolchain instance was not created yet @@ -1611,9 +1612,8 @@ def _generate_template_values(self, ignore=None): # get updated set of template values, now with toolchain instance # (which is used to define the %(mpi_cmd_prefix)s template) - self.enable_templating = False - template_values = template_constant_dict(self, ignore=ignore, toolchain=toolchain) - self.enable_templating = prev_enable_templating + with disable_templating(self): + template_values = template_constant_dict(self, ignore=ignore, toolchain=toolchain) # update the template_values dict self.template_values.update(template_values) @@ -1656,13 +1656,8 @@ def get_ref(self, key): # see also comments in resolve_template # temporarily disable templating - prev_enable_templating = self.enable_templating - self.enable_templating = False - - ref = self[key] - - # restore previous value for 'enable_templating' - self.enable_templating = prev_enable_templating + with disable_templating(self): + ref = self[key] return ref diff --git a/easybuild/framework/easyconfig/format/one.py b/easybuild/framework/easyconfig/format/one.py index 88d043bc61..350afdbb6c 100644 --- a/easybuild/framework/easyconfig/format/one.py +++ b/easybuild/framework/easyconfig/format/one.py @@ -97,6 +97,13 @@ class FormatOneZero(EasyConfigFormatConfigObj): PYHEADER_MANDATORY = ['version', 'name', 'toolchain', 'homepage', 'description'] PYHEADER_BLACKLIST = [] + def __init__(self, *args, **kwargs): + """FormatOneZero constructor.""" + super(FormatOneZero, self).__init__(*args, **kwargs) + + self.log = fancylogger.getLogger(self.__class__.__name__, fname=False) + self.strict_sanity_check_paths_keys = True + def validate(self): """Format validation""" # minimal checks @@ -168,11 +175,14 @@ def _reformat_line(self, param_name, param_val, outer=False, addlen=0): for item_key in ordered_item_keys: if item_key in param_val: item_val = param_val[item_key] + item_comments = self._get_item_comments(param_name, item_val) + elif param_name == 'sanity_check_paths' and not self.strict_sanity_check_paths_keys: + item_val = [] + item_comments = {} + self.log.info("Using default value for '%s' in sanity_check_paths: %s", item_key, item_val) else: raise EasyBuildError("Missing mandatory key '%s' in %s.", item_key, param_name) - item_comments = self._get_item_comments(param_name, item_val) - inline_comment = item_comments.get('inline', '') item_tmpl_dict = {'inline_comment': inline_comment} @@ -317,6 +327,10 @@ def dump(self, ecfg, default_values, templ_const, templ_val, toolchain_hierarchy :param templ_val: known template values :param toolchain_hierarchy: hierarchy of toolchains for easyconfig """ + # figure out whether we should be strict about the format of sanity_check_paths; + # if enhance_sanity_check is set, then both files/dirs keys are not strictly required...
+ self.strict_sanity_check_paths_keys = not ecfg['enhance_sanity_check'] + # include header comments first dump = self.comments['header'][:] diff --git a/easybuild/framework/easyconfig/tweak.py b/easybuild/framework/easyconfig/tweak.py index eb55bbb481..60ed4e2c22 100644 --- a/easybuild/framework/easyconfig/tweak.py +++ b/easybuild/framework/easyconfig/tweak.py @@ -52,11 +52,13 @@ from easybuild.framework.easyconfig.format.format import DEPENDENCY_PARAMETERS from easybuild.framework.easyconfig.parser import fetch_parameters_from_easyconfig from easybuild.framework.easyconfig.tools import alt_easyconfig_paths +from easybuild.toolchains.compiler.systemcompiler import TC_CONSTANT_SYSTEM from easybuild.toolchains.gcccore import GCCcore from easybuild.tools.build_log import EasyBuildError, print_warning from easybuild.tools.config import build_option from easybuild.tools.filetools import read_file, write_file from easybuild.tools.module_naming_scheme.utilities import det_full_ec_version +from easybuild.tools.py2vs3 import string_type from easybuild.tools.robot import resolve_dependencies, robot_find_easyconfig, search_easyconfigs from easybuild.tools.toolchain.toolchain import SYSTEM_TOOLCHAIN_NAME from easybuild.tools.toolchain.toolchain import TOOLCHAIN_CAPABILITIES @@ -907,8 +909,11 @@ def map_common_versionsuffixes(software_name, original_toolchain, toolchain_mapp if original_suffix in versionsuffix_mappings: if mapped_suffix != versionsuffix_mappings[original_suffix]: raise EasyBuildError("No unique versionsuffix mapping for %s in %s toolchain " - "hierarchy to %s toolchain hierarchy", original_suffix, - original_toolchain, toolchain_mapping[original_toolchain['name']]) + "hierarchy to %s toolchain hierarchy (mapped suffix was %s but " + "versionsuffix mappings were %s)", + original_suffix, original_toolchain, + toolchain_mapping[original_toolchain['name']], mapped_suffix, + versionsuffix_mappings) else: versionsuffix_mappings[original_suffix] = mapped_suffix @@ -1115,6 +1120,9 @@ def find_potential_version_mappings(dep, toolchain_mapping, versionsuffix_mappin # Figure out the main versionsuffix (altered depending on toolchain in the loop below) versionsuffix = dep.get('versionsuffix', '') + # If versionsuffix is equal to None, it should be put to empty string + if versionsuffix is None: + versionsuffix = '' # If versionsuffix is in our mapping then we expect it to be updated if versionsuffix in versionsuffix_mapping: versionsuffix = versionsuffix_mapping[versionsuffix] @@ -1131,14 +1139,12 @@ def find_potential_version_mappings(dep, toolchain_mapping, versionsuffix_mappin if len(version_components) > 1: # Have at least major.minor candidate_ver_list.append(r'%s\..*' % major_version) candidate_ver_list.append(r'.*') # Include a major version search - potential_version_mappings, highest_version = [], None for candidate_ver in candidate_ver_list: # if any potential version mappings were found already at this point, we don't add more if not potential_version_mappings: - for toolchain in toolchain_hierarchy: # determine search pattern based on toolchain, version prefix/suffix & version regex @@ -1155,6 +1161,21 @@ def find_potential_version_mappings(dep, toolchain_mapping, versionsuffix_mappin tweaked_ecs_paths, _ = alt_easyconfig_paths(tempfile.gettempdir(), tweaked_ecs=True) cand_paths = [path for path in cand_paths if not path.startswith(tweaked_ecs_paths)] + # if SYSTEM_TOOLCHAIN_NAME is used, it produces regex of the form + # -.eb, which can map to incompatible toolchains. 
+ # For example Boost-1.68\..*.eb would match Boost-1.68.0-intel-2019a.eb + # This filters out such matches unless the toolchain in the easyconfig matches a system toolchain + if toolchain['name'] == SYSTEM_TOOLCHAIN_NAME: + cand_paths_filtered = [] + for path in cand_paths: + tc_candidate = fetch_parameters_from_easyconfig(read_file(path), ['toolchain'])[0] + if isinstance(tc_candidate, dict) and tc_candidate['name'] == SYSTEM_TOOLCHAIN_NAME: + cand_paths_filtered += [path] + if isinstance(tc_candidate, string_type) and tc_candidate == TC_CONSTANT_SYSTEM: + cand_paths_filtered += [path] + + cand_paths = cand_paths_filtered + # add what is left to the possibilities for path in cand_paths: version = fetch_parameters_from_easyconfig(read_file(path), ['version'])[0] diff --git a/easybuild/framework/extension.py b/easybuild/framework/extension.py index b44d5759fe..90ba521ecd 100644 --- a/easybuild/framework/extension.py +++ b/easybuild/framework/extension.py @@ -127,14 +127,14 @@ def __init__(self, mself, ext, extra_params=None): # make sure they are merged into self.cfg so they can be queried; # unknown easyconfig parameters are ignored since self.options may include keys only there for extensions; # this allows to specify custom easyconfig parameters on a per-extension basis - for key in self.options: + for key, value in self.options.items(): if key in self.cfg: - self.cfg[key] = resolve_template(self.options[key], self.cfg.template_values) + self.cfg[key] = value self.log.debug("Customising known easyconfig parameter '%s' for extension %s/%s: %s", - key, name, version, self.cfg[key]) + key, name, version, value) else: self.log.debug("Skipping unknown custom easyconfig parameter '%s' for extension %s/%s: %s", - key, name, version, self.options[key]) + key, name, version, value) self.sanity_check_fail_msgs = [] diff --git a/easybuild/framework/extensioneasyblock.py b/easybuild/framework/extensioneasyblock.py index 35b1bf4407..277a59fb45 100644 --- a/easybuild/framework/extensioneasyblock.py +++ b/easybuild/framework/extensioneasyblock.py @@ -103,7 +103,9 @@ def run(self, unpack_src=False): # unpack file if desired if unpack_src: targetdir = os.path.join(self.master.builddir, remove_unwanted_chars(self.name)) - self.ext_dir = extract_file("%s" % self.src, targetdir, extra_options=self.unpack_options) + self.ext_dir = extract_file(self.src, targetdir, extra_options=self.unpack_options, + change_into_dir=False) + change_dir(self.ext_dir) if self.start_dir and os.path.isdir(self.start_dir): self.log.debug("Using start_dir: %s", self.start_dir) diff --git a/easybuild/main.py b/easybuild/main.py index 415321dc9a..94ed2ea301 100644 --- a/easybuild/main.py +++ b/easybuild/main.py @@ -57,7 +57,7 @@ from easybuild.tools.containers.common import containerize from easybuild.tools.docs import list_software from easybuild.tools.filetools import adjust_permissions, cleanup, copy_file, copy_files, dump_index, load_index -from easybuild.tools.filetools import read_file, write_file +from easybuild.tools.filetools import read_file, register_lock_cleanup_signal_handlers, write_file from easybuild.tools.github import check_github, close_pr, new_branch_github, find_easybuild_easyconfig from easybuild.tools.github import install_github_token, list_prs, new_pr, new_pr_from_branch, merge_pr from easybuild.tools.github import sync_branch_with_develop, sync_pr_with_develop, update_branch, update_pr @@ -189,6 +189,9 @@ def main(args=None, logfile=None, do_build=None, testing=False, modtool=None): :param do_build: 
whether or not to actually perform the build :param testing: enable testing mode """ + + register_lock_cleanup_signal_handlers() + # if $CDPATH is set, unset it, it'll only cause trouble... # see https://github.com/easybuilders/easybuild-framework/issues/2944 if 'CDPATH' in os.environ: @@ -518,5 +521,5 @@ def main(args=None, logfile=None, do_build=None, testing=False, modtool=None): main() except EasyBuildError as err: print_error(err.msg) - except KeyboardInterrupt: - print_error("Cancelled by user (keyboard interrupt)") + except KeyboardInterrupt as err: + print_error("Cancelled by user: %s" % err) diff --git a/easybuild/scripts/install_eb_dep.sh b/easybuild/scripts/install_eb_dep.sh index ccc6c74b88..82c34b774d 100755 --- a/easybuild/scripts/install_eb_dep.sh +++ b/easybuild/scripts/install_eb_dep.sh @@ -1,77 +1,81 @@ #!/bin/bash -set -e - if [ $# -ne 2 ]; then echo "Usage: $0 - " exit 1 fi -PKG=$1 -PREFIX=$2 -PKG_NAME=`echo $PKG | sed 's/-[^-]*$//g'` -PKG_VERSION=`echo $PKG | sed 's/.*-//g'` +set -eu + +PKG="$1" +PREFIX="$2" + +PKG_NAME="${PKG%-*}" +PKG_VERSION="${PKG##*-}" CONFIG_OPTIONS= PRECONFIG_CMD= -if [ x$PKG_NAME == 'xmodules' ] && [ x$PKG_VERSION == 'x3.2.10' ]; then +if [ "$PKG_NAME" == 'modules' ] && [ "$PKG_VERSION" == '3.2.10' ]; then PKG_URL="http://prdownloads.sourceforge.net/modules/${PKG}.tar.gz" BACKUP_PKG_URL="https://easybuilders.github.io/easybuild/files/${PKG}.tar.gz" - export PATH=$PREFIX/Modules/$PKG_VERSION/bin:$PATH - export MOD_INIT=$HOME/Modules/$PKG_VERSION/init/bash + export PATH="$PREFIX/Modules/$PKG_VERSION/bin:$PATH" + export MOD_INIT="$PREFIX/Modules/$PKG_VERSION/init/bash" -elif [ x$PKG_NAME == 'xmodules' ]; then +elif [ "$PKG_NAME" == 'modules' ]; then PKG_URL="http://prdownloads.sourceforge.net/modules/${PKG}.tar.gz" - export PATH=$PREFIX/bin:$PATH - export MOD_INIT=$HOME/init/bash + export PATH="$PREFIX/bin:$PATH" + export MOD_INIT="$PREFIX/init/bash" -elif [ x$PKG_NAME == 'xlua' ]; then +elif [ "$PKG_NAME" == 'lua' ]; then PKG_URL="http://downloads.sourceforge.net/project/lmod/${PKG}.tar.gz" BACKUP_PKG_URL="https://easybuilders.github.io/easybuild/files/${PKG}.tar.gz" PRECONFIG_CMD="make clean" CONFIG_OPTIONS='--with-static=yes' - export PATH=$PWD/$PKG:$PREFIX/bin:$PATH + export PATH="$PWD/$PKG:$PREFIX/bin:$PATH" -elif [ x$PKG_NAME == 'xLmod' ]; then +elif [ "$PKG_NAME" == 'Lmod' ]; then PKG_URL="https://github.com/TACC/Lmod/archive/${PKG_VERSION}.tar.gz" - export PATH=$PREFIX/lmod/$PKG_VERSION/libexec:$PATH - export MOD_INIT=$HOME/lmod/$PKG_VERSION/init/bash + export PATH="$PREFIX/lmod/$PKG_VERSION/libexec:$PATH" + export MOD_INIT="$PREFIX/lmod/$PKG_VERSION/init/bash" -elif [ x$PKG_NAME == 'xmodules-tcl' ]; then +elif [ "$PKG_NAME" == 'modules-tcl' ]; then # obtain tarball from upstream via http://modules.cvs.sourceforge.net/viewvc/modules/modules/?view=tar&revision=1.147 PKG_URL="https://easybuilders.github.io/easybuild/files/modules-tcl-${PKG_VERSION}.tar.gz" - export MODULESHOME=$PREFIX/$PKG/tcl # required by init/bash source script - export PATH=$MODULESHOME:$PATH - export MOD_INIT=$MODULESHOME/init/bash.in + export MODULESHOME="$PREFIX/$PKG/tcl" # required by init/bash source script + export PATH="$MODULESHOME:$PATH" + export MOD_INIT="$MODULESHOME/init/bash.in" else echo "ERROR: Unknown package name '$PKG_NAME'" exit 2 fi echo "Installing ${PKG} @ ${PREFIX}..." -mkdir -p ${PREFIX} -set +e -wget ${PKG_URL} && tar xfz *${PKG_VERSION}.tar.gz -if [ $? -ne 0 ] && [ ! 
-z $BACKUP_PKG_URL ]; then - rm -f *${PKG_VERSION}.tar.gz - wget ${BACKUP_PKG_URL} && tar xfz *${PKG_VERSION}.tar.gz +mkdir -p "${PREFIX}" +if ! wget "${PKG_URL}" && [ -n "$BACKUP_PKG_URL" ]; then + rm -f ./*"${PKG_VERSION}".tar.gz + wget "${BACKUP_PKG_URL}" fi -set -e + +tar xfz ./*"${PKG_VERSION}".tar.gz +rm ./*"${PKG_VERSION}".tar.gz # environment-modules needs a patch to work with Tcl8.6 -if [ x$PKG_NAME == 'xmodules' ] && [ x$PKG_VERSION == 'x3.2.10' ]; then +if [ "$PKG_NAME" == 'modules' ] && [ "$PKG_VERSION" == '3.2.10' ]; then wget -O 'modules-tcl8.6.patch' 'https://easybuilders.github.io/easybuild/files/modules-3.2.10-tcl8.6.patch' - patch ${PKG}/cmdModule.c modules-tcl8.6.patch + patch "${PKG}/cmdModule.c" modules-tcl8.6.patch fi -if [ x$PKG_NAME == 'xmodules-tcl' ]; then - mv modules $PREFIX/${PKG} +if [ "$PKG_NAME" == 'modules-tcl' ]; then + mv modules "$PREFIX/${PKG}" else - cd ${PKG} - if [[ ! -z $PRECONFIG_CMD ]]; then - eval ${PRECONFIG_CMD} + cd "${PKG}" + if [[ -n "$PRECONFIG_CMD" ]]; then + eval "${PRECONFIG_CMD}" fi - ./configure $CONFIG_OPTIONS --prefix=$PREFIX && make && make install + ./configure $CONFIG_OPTIONS --prefix="$PREFIX" && make && make install cd - > /dev/null + rm -r "${PKG}" fi + +set +eu diff --git a/easybuild/toolchains/mpi/craympich.py b/easybuild/toolchains/mpi/craympich.py index cf32237451..bd01662002 100644 --- a/easybuild/toolchains/mpi/craympich.py +++ b/easybuild/toolchains/mpi/craympich.py @@ -40,7 +40,7 @@ class CrayMPICH(Mpi): """Generic support for using Cray compiler wrappers""" # MPI support # no separate module, Cray compiler drivers always provide MPI support - MPI_MODULE_NAME = [] + MPI_MODULE_NAME = None MPI_FAMILY = TC_CONSTANT_MPICH MPI_TYPE = TC_CONSTANT_MPI_TYPE_MPICH diff --git a/easybuild/tools/config.py b/easybuild/tools/config.py index 0bcf31ab8b..3bca0194b7 100644 --- a/easybuild/tools/config.py +++ b/easybuild/tools/config.py @@ -102,6 +102,8 @@ DEFAULT_PNS = 'EasyBuildPNS' DEFAULT_PREFIX = os.path.join(os.path.expanduser('~'), ".local", "easybuild") DEFAULT_REPOSITORY = 'FileRepository' +DEFAULT_WAIT_ON_LOCK_INTERVAL = 60 +DEFAULT_WAIT_ON_LOCK_LIMIT = 0 EBROOT_ENV_VAR_ACTIONS = [ERROR, IGNORE, UNSET, WARN] LOADED_MODULES_ACTIONS = [ERROR, IGNORE, PURGE, UNLOAD, WARN] @@ -211,6 +213,7 @@ def mk_full_default_path(name, prefix=DEFAULT_PREFIX): 'subdir_user_modules', 'test_report_env_filter', 'testoutput', + 'wait_on_lock', 'umask', 'zip_logs', ], @@ -256,7 +259,7 @@ def mk_full_default_path(name, prefix=DEFAULT_PREFIX): 'use_f90cache', 'use_existing_modules', 'set_default_module', - 'wait_on_lock', + 'wait_on_lock_limit', ], True: [ 'cleanup_builddir', @@ -305,6 +308,9 @@ def mk_full_default_path(name, prefix=DEFAULT_PREFIX): DEFAULT_ALLOW_LOADED_MODULES: [ 'allow_loaded_modules', ], + DEFAULT_WAIT_ON_LOCK_INTERVAL: [ + 'wait_on_lock_interval', + ], } # build option that do not have a perfectly matching command line option BUILD_OPTIONS_OTHER = { diff --git a/easybuild/tools/filetools.py b/easybuild/tools/filetools.py index 20b8cd6335..207e80b815 100644 --- a/easybuild/tools/filetools.py +++ b/easybuild/tools/filetools.py @@ -40,7 +40,6 @@ """ import datetime import difflib -import distutils.dir_util import fileinput import glob import hashlib @@ -49,6 +48,7 @@ import os import re import shutil +import signal import stat import sys import tempfile @@ -60,7 +60,7 @@ from easybuild.tools import run # import build_log must stay, to use of EasyBuildLog from easybuild.tools.build_log import EasyBuildError, dry_run_msg, print_msg, 
print_warning -from easybuild.tools.config import GENERIC_EASYBLOCK_PKG, build_option +from easybuild.tools.config import DEFAULT_WAIT_ON_LOCK_INTERVAL, GENERIC_EASYBLOCK_PKG, build_option, install_path from easybuild.tools.py2vs3 import std_urllib, string_type from easybuild.tools.utilities import nub, remove_unwanted_chars @@ -156,6 +156,9 @@ '.tar.z': "tar xzf %(filepath)s", } +# global set of names of locks that were created in this session +global_lock_names = set() + class ZlibChecksum(object): """ @@ -372,7 +375,7 @@ def change_dir(path): return cwd -def extract_file(fn, dest, cmd=None, extra_options=None, overwrite=False, forced=False): +def extract_file(fn, dest, cmd=None, extra_options=None, overwrite=False, forced=False, change_into_dir=None): """ Extract file at given path to specified directory :param fn: path to file to extract @@ -381,8 +384,16 @@ def extract_file(fn, dest, cmd=None, extra_options=None, overwrite=False, forced :param extra_options: extra options to pass to extract command :param overwrite: overwrite existing unpacked file :param forced: force extraction in (extended) dry run mode + :param change_into_dir: change into resulting directory; + None (current default) implies True, but this is deprecated, + this named argument should be set to False or True explicitly + (in a future major release, default will be changed to False) :return: path to directory (in case of success) """ + if change_into_dir is None: + _log.deprecated("extract_file function was called without specifying value for change_into_dir", '5.0') + change_into_dir = True + if not os.path.isfile(fn) and not build_option('extended_dry_run'): raise EasyBuildError("Can't extract file %s: no such file", fn) @@ -392,8 +403,8 @@ def extract_file(fn, dest, cmd=None, extra_options=None, overwrite=False, forced abs_dest = os.path.abspath(dest) # change working directory - _log.debug("Unpacking %s in directory %s.", fn, abs_dest) - change_dir(abs_dest) + _log.debug("Unpacking %s in directory %s", fn, abs_dest) + cwd = change_dir(abs_dest) if not cmd: cmd = extract_cmd(fn, overwrite=overwrite) @@ -408,7 +419,18 @@ def extract_file(fn, dest, cmd=None, extra_options=None, overwrite=False, forced run.run_cmd(cmd, simple=True, force_in_dry_run=forced) - return find_base_dir() + # note: find_base_dir also changes into the base dir!
+ base_dir = find_base_dir() + + # if changing into obtained directory is not desired, + # change back to where we came from (unless that was a non-existing directory) + if not change_into_dir: + if cwd is None: + raise EasyBuildError("Can't change back to non-existing directory after extracting %s in %s", fn, dest) + else: + change_dir(cwd) + + return base_dir def which(cmd, retain_all=False, check_perms=True, log_ok=True, log_error=True): @@ -495,7 +517,13 @@ def pypi_source_urls(pkg_name): _log.debug("Failed to download %s to determine available PyPI URLs for %s", simple_url, pkg_name) res = [] else: - parsed_html = ElementTree.parse(urls_html) + urls_txt = read_file(urls_html) + + # ignore yanked releases (see https://pypi.org/help/#yanked) + # see https://github.com/easybuilders/easybuild-framework/issues/3301 + urls_txt = re.sub(r'', '', urls_txt) + + parsed_html = ElementTree.ElementTree(ElementTree.fromstring(urls_txt)) if hasattr(parsed_html, 'iter'): res = [a.attrib['href'] for a in parsed_html.iter('a')] else: @@ -758,6 +786,18 @@ def find_easyconfigs(path, ignore_dirs=None): return files +def find_glob_pattern(glob_pattern, fail_on_no_match=True): + """Find unique file/dir matching glob_pattern (raises error if more than one match is found)""" + if build_option('extended_dry_run'): + return glob_pattern + res = glob.glob(glob_pattern) + if len(res) == 0 and not fail_on_no_match: + return None + if len(res) != 1: + raise EasyBuildError("Was expecting exactly one match for '%s', found %d: %s", glob_pattern, len(res), res) + return res[0] + + def search_file(paths, query, short=False, ignore_dirs=None, silent=False, filename_only=False, terse=False, case_sensitive=False): """ @@ -1186,7 +1226,8 @@ def apply_patch(patch_file, dest, fn=None, copy=False, level=None, use_git_am=Fa workdir = tempfile.mkdtemp(prefix='eb-patch-') _log.debug("Extracting the patch to: %s", workdir) # extracting the patch - apatch_dir = extract_file(apatch, workdir) + apatch_dir = extract_file(apatch, workdir, change_into_dir=False) + change_dir(apatch_dir) apatch = os.path.join(apatch_dir, apatch_name) if level is None and build_option('extended_dry_run'): @@ -1476,6 +1517,131 @@ def mkdir(path, parents=False, set_gid=None, sticky=None): _log.debug("Not creating existing path %s" % path) +def det_lock_path(lock_name): + """ + Determine full path for lock with specified name. + """ + locks_dir = build_option('locks_dir') or os.path.join(install_path('software'), '.locks') + return os.path.join(locks_dir, lock_name + '.lock') + + +def create_lock(lock_name): + """Create lock with specified name.""" + + lock_path = det_lock_path(lock_name) + _log.info("Creating lock at %s...", lock_path) + try: + # we use a directory as a lock, since that's atomically created + mkdir(lock_path, parents=True) + global_lock_names.add(lock_name) + except EasyBuildError as err: + # clean up the error message a bit, get rid of the "Failed to create directory" part + quotes + stripped_err = str(err).split(':', 1)[1].strip().replace("'", '').replace('"', '') + raise EasyBuildError("Failed to create lock %s: %s", lock_path, stripped_err) + _log.info("Lock created: %s", lock_path) + + +def check_lock(lock_name): + """ + Check whether a lock with specified name already exists. + + If it exists, either wait until it's released, or raise an error + (depending on --wait-on-lock configuration option).
+ """ + lock_path = det_lock_path(lock_name) + if os.path.exists(lock_path): + _log.info("Lock %s exists!", lock_path) + + wait_interval = build_option('wait_on_lock_interval') + wait_limit = build_option('wait_on_lock_limit') + + # --wait-on-lock is deprecated, should use --wait-on-lock-limit and --wait-on-lock-interval instead + wait_on_lock = build_option('wait_on_lock') + if wait_on_lock is not None: + depr_msg = "Use of --wait-on-lock is deprecated, use --wait-on-lock-limit and --wait-on-lock-interval" + _log.deprecated(depr_msg, '5.0') + + # if --wait-on-lock-interval has default value and --wait-on-lock is specified too, the latter wins + # (required for backwards compatibility) + if wait_interval == DEFAULT_WAIT_ON_LOCK_INTERVAL and wait_on_lock > 0: + wait_interval = wait_on_lock + + # if --wait-on-lock-limit is not specified we need to wait indefinitely if --wait-on-lock is specified, + # since the original semantics of --wait-on-lock was that it specified the waiting time interval (no limit) + if not wait_limit: + wait_limit = -1 + + # wait limit could be zero (no waiting), -1 (no waiting limit) or non-zero value (waiting limit in seconds) + if wait_limit != 0: + wait_time = 0 + while os.path.exists(lock_path) and (wait_limit == -1 or wait_time < wait_limit): + print_msg("lock %s exists, waiting %d seconds..." % (lock_path, wait_interval), + silent=build_option('silent')) + time.sleep(wait_interval) + wait_time += wait_interval + + if os.path.exists(lock_path) and wait_limit != -1 and wait_time >= wait_limit: + error_msg = "Maximum wait time for lock %s to be released reached: %s sec >= %s sec" + raise EasyBuildError(error_msg, lock_path, wait_time, wait_limit) + else: + _log.info("Lock %s was released!", lock_path) + else: + raise EasyBuildError("Lock %s already exists, aborting!", lock_path) + else: + _log.info("Lock %s does not exist", lock_path) + + +def remove_lock(lock_name): + """ + Remove lock with specified name. + """ + lock_path = det_lock_path(lock_name) + _log.info("Removing lock %s...", lock_path) + remove_dir(lock_path) + if lock_name in global_lock_names: + global_lock_names.remove(lock_name) + _log.info("Lock removed: %s", lock_path) + + +def clean_up_locks(): + """ + Clean up all still existing locks that were created in this session. + """ + for lock_name in list(global_lock_names): + remove_lock(lock_name) + + +def clean_up_locks_signal_handler(signum, frame): + """ + Signal handler, cleans up locks & exits with received signal number. + """ + + if not build_option('silent'): + print_warning("signal received (%s), cleaning up locks (%s)..." % (signum, ', '.join(global_lock_names))) + clean_up_locks() + + # by default, a KeyboardInterrupt is raised with SIGINT, so keep doing so + if signum == signal.SIGINT: + raise KeyboardInterrupt("keyboard interrupt") + else: + sys.exit(signum) + + +def register_lock_cleanup_signal_handlers(): + """ + Register signal handler for signals that cancel the current EasyBuild session, + so we can clean up the locks that were created first. 
+ """ + signums = [ + signal.SIGABRT, + signal.SIGINT, # Ctrl-C + signal.SIGTERM, # signal 15, soft kill (like when Slurm job is cancelled or received timeout) + signal.SIGQUIT, # kinda like Ctrl-C + ] + for signum in signums: + signal.signal(signum, clean_up_locks_signal_handler) + + def expand_glob_paths(glob_paths): """Expand specified glob paths to a list of unique non-glob paths to only files.""" paths = [] @@ -1907,7 +2073,12 @@ def copy_file(path, target_path, force_in_dry_run=False): _log.info("Copied contents of file %s to %s", path, target_path) else: mkdir(os.path.dirname(target_path), parents=True) - shutil.copy2(path, target_path) + if os.path.exists(path): + shutil.copy2(path, target_path) + elif os.path.islink(path): + # special care for copying broken symlinks + link_target = os.readlink(path) + symlink(link_target, target_path) _log.info("%s copied to %s", path, target_path) except (IOError, OSError, shutil.Error) as err: raise EasyBuildError("Failed to copy file %s to %s: %s", path, target_path, err) @@ -1942,16 +2113,13 @@ def copy_dir(path, target_path, force_in_dry_run=False, dirs_exist_ok=False, **k :param path: the original directory path :param target_path: path to copy the directory to :param force_in_dry_run: force running the command during dry run - :param dirs_exist_ok: wrapper around shutil.copytree option, which was added in Python 3.8 + :param dirs_exist_ok: boolean indicating whether it's OK if the target directory already exists - On Python >= 3.8 shutil.copytree is always used - On Python < 3.8 if 'dirs_exist_ok' is False - shutil.copytree is used - On Python < 3.8 if 'dirs_exist_ok' is True - distutils.dir_util.copy_tree is used + shutil.copytree is used if the target path does not exist yet; + if the target path already exists, the 'copy' function will be used to copy the contents of + the source path to the target path - Additional specified named arguments are passed down to shutil.copytree if used. - - Because distutils.dir_util.copy_tree supports only 'symlinks' named argument, - using any other will raise EasyBuildError. + Additional specified named arguments are passed down to shutil.copytree/copy if used. 
""" if not force_in_dry_run and build_option('extended_dry_run'): dry_run_msg("copied directory %s to %s" % (path, target_path)) @@ -1960,38 +2128,49 @@ def copy_dir(path, target_path, force_in_dry_run=False, dirs_exist_ok=False, **k if not dirs_exist_ok and os.path.exists(target_path): raise EasyBuildError("Target location %s to copy %s to already exists", target_path, path) - if sys.version_info >= (3, 8): - # on Python >= 3.8, shutil.copytree works fine, thanks to availability of dirs_exist_ok named argument - shutil.copytree(path, target_path, dirs_exist_ok=dirs_exist_ok, **kwargs) + # note: in Python >= 3.8 shutil.copytree works just fine thanks to the 'dirs_exist_ok' argument, + # but since we need to be more careful in earlier Python versions we use our own implementation + # in case the target directory exists and 'dirs_exist_ok' is enabled + if dirs_exist_ok and os.path.exists(target_path): + # if target directory already exists (and that's allowed via dirs_exist_ok), + # we need to be more careful, since shutil.copytree will fail (in Python < 3.8) + # if target directory already exists; + # so, recurse via 'copy' function to copy files/dirs in source path to target path + # (NOTE: don't use distutils.dir_util.copy_tree here, see + # https://github.com/easybuilders/easybuild-framework/issues/3306) + + entries = os.listdir(path) - elif dirs_exist_ok: - # use distutils.dir_util.copy_tree with Python < 3.8 if dirs_exist_ok is enabled + # take into account 'ignore' function that is supported by shutil.copytree + # (but not by 'copy_file' function used by 'copy') + ignore = kwargs.get('ignore') + if ignore: + ignored_entries = ignore(path, entries) + entries = [x for x in entries if x not in ignored_entries] - # first get value for symlinks named argument (if any) - preserve_symlinks = kwargs.pop('symlinks', False) + # determine list of paths to copy + paths_to_copy = [os.path.join(path, x) for x in entries] - # check if there are other named arguments (there shouldn't be, only 'symlinks' is supported) - if kwargs: - raise EasyBuildError("Unknown named arguments passed to copy_dir with dirs_exist_ok=True: %s", - ', '.join(sorted(kwargs.keys()))) - distutils.dir_util.copy_tree(path, target_path, preserve_symlinks=preserve_symlinks) + copy(paths_to_copy, target_path, + force_in_dry_run=force_in_dry_run, dirs_exist_ok=dirs_exist_ok, **kwargs) else: - # if dirs_exist_ok is not enabled, just use shutil.copytree + # if dirs_exist_ok is not enabled or target directory doesn't exist, just use shutil.copytree shutil.copytree(path, target_path, **kwargs) _log.info("%s copied to %s", path, target_path) - except (IOError, OSError) as err: + except (IOError, OSError, shutil.Error) as err: raise EasyBuildError("Failed to copy directory %s to %s: %s", path, target_path, err) -def copy(paths, target_path, force_in_dry_run=False): +def copy(paths, target_path, force_in_dry_run=False, **kwargs): """ Copy single file/directory or list of files and directories to specified location :param paths: path(s) to copy :param target_path: target location :param force_in_dry_run: force running the command during dry run + :param kwargs: additional named arguments to pass down to copy_dir """ if isinstance(paths, string_type): paths = [paths] @@ -2002,10 +2181,11 @@ def copy(paths, target_path, force_in_dry_run=False): full_target_path = os.path.join(target_path, os.path.basename(path)) mkdir(os.path.dirname(full_target_path), parents=True) - if os.path.isfile(path): + # copy broken symlinks only 
if 'symlinks=True' is used + if os.path.isfile(path) or (os.path.islink(path) and kwargs.get('symlinks')): copy_file(path, full_target_path, force_in_dry_run=force_in_dry_run) elif os.path.isdir(path): - copy_dir(path, full_target_path, force_in_dry_run=force_in_dry_run) + copy_dir(path, full_target_path, force_in_dry_run=force_in_dry_run, **kwargs) else: raise EasyBuildError("Specified path to copy is not an existing file or directory: %s", path) diff --git a/easybuild/tools/github.py b/easybuild/tools/github.py index 9eb9219dd8..25079ae5bc 100644 --- a/easybuild/tools/github.py +++ b/easybuild/tools/github.py @@ -50,7 +50,7 @@ from easybuild.framework.easyconfig.parser import EasyConfigParser from easybuild.tools.build_log import EasyBuildError, print_msg, print_warning from easybuild.tools.config import build_option -from easybuild.tools.filetools import apply_patch, copy_dir, copy_easyblocks, copy_framework_files +from easybuild.tools.filetools import apply_patch, change_dir, copy_dir, copy_easyblocks, copy_framework_files from easybuild.tools.filetools import det_patched_files, download_file, extract_file from easybuild.tools.filetools import get_easyblock_class_name, mkdir, read_file, symlink, which, write_file from easybuild.tools.py2vs3 import HTTPError, URLError, ascii_letters, urlopen @@ -360,7 +360,9 @@ def download_repo(repo=GITHUB_EASYCONFIGS_REPO, branch='master', account=GITHUB_ download_file(base_name, url, target_path, forced=True) _log.debug("%s downloaded to %s, extracting now" % (base_name, path)) - extracted_path = os.path.join(extract_file(target_path, path, forced=True), extracted_dir_name) + base_dir = extract_file(target_path, path, forced=True, change_into_dir=False) + change_dir(base_dir) + extracted_path = os.path.join(base_dir, extracted_dir_name) # check if extracted_path exists if not os.path.isdir(extracted_path): @@ -1333,11 +1335,18 @@ def new_branch_github(paths, ecs, commit_msg=None): @only_if_module_is_available('git', pkgname='GitPython') -def new_pr_from_branch(branch_name, title=None, descr=None, pr_target_repo=None, pr_metadata=None): +def new_pr_from_branch(branch_name, title=None, descr=None, pr_target_repo=None, pr_metadata=None, commit_msg=None): """ Create new pull request from specified branch on GitHub. 
""" + if descr is None: + descr = build_option('pr_descr') + if commit_msg is None: + commit_msg = build_option('pr_commit_msg') + if title is None: + title = build_option('pr_title') or commit_msg + pr_target_account = build_option('pr_target_account') pr_target_branch = build_option('pr_target_branch') if pr_target_repo is None: @@ -1550,19 +1559,15 @@ def new_pr(paths, ecs, title=None, descr=None, commit_msg=None): :param commit_msg: commit message to use """ - if descr is None: - descr = build_option('pr_descr') if commit_msg is None: commit_msg = build_option('pr_commit_msg') - if title is None: - title = build_option('pr_title') or commit_msg # create new branch in GitHub res = new_branch_github(paths, ecs, commit_msg=commit_msg) file_info, deleted_paths, _, branch_name, diff_stat, pr_target_repo = res new_pr_from_branch(branch_name, title=title, descr=descr, pr_target_repo=pr_target_repo, - pr_metadata=(file_info, deleted_paths, diff_stat)) + pr_metadata=(file_info, deleted_paths, diff_stat), commit_msg=commit_msg) def det_account_branch_for_pr(pr_id, github_user=None, pr_target_repo=None): diff --git a/easybuild/tools/module_generator.py b/easybuild/tools/module_generator.py index e3859880e1..c001b809d0 100644 --- a/easybuild/tools/module_generator.py +++ b/easybuild/tools/module_generator.py @@ -524,7 +524,8 @@ def _generate_extension_list(self): """ Generate a string with a comma-separated list of extensions. """ - exts_list = self.app.cfg['exts_list'] + # We need only name and version, so don't resolve templates + exts_list = self.app.cfg.get_ref('exts_list') extensions = ', '.join(sorted(['-'.join(ext[:2]) for ext in exts_list], key=str.lower)) return extensions diff --git a/easybuild/tools/module_naming_scheme/hierarchical_mns.py b/easybuild/tools/module_naming_scheme/hierarchical_mns.py index f6d7cf1423..1647f4c96e 100644 --- a/easybuild/tools/module_naming_scheme/hierarchical_mns.py +++ b/easybuild/tools/module_naming_scheme/hierarchical_mns.py @@ -41,9 +41,11 @@ CORE = 'Core' COMPILER = 'Compiler' MPI = 'MPI' +TOOLCHAIN = 'Toolchain' MODULECLASS_COMPILER = 'compiler' MODULECLASS_MPI = 'mpi' +MODULECLASS_TOOLCHAIN = 'toolchain' GCCCORE = GCCcore.NAME @@ -107,7 +109,11 @@ def det_toolchain_compilers_name_version(self, tc_comps): # no compiler in toolchain, system toolchain res = None elif len(tc_comps) == 1: - res = (tc_comps[0]['name'], self.det_full_version(tc_comps[0])) + tc_comp = tc_comps[0] + if tc_comp is None: + res = None + else: + res = (tc_comp['name'], self.det_full_version(tc_comp)) else: comp_versions = dict([(comp['name'], self.det_full_version(comp)) for comp in tc_comps]) comp_names = comp_versions.keys() @@ -135,6 +141,10 @@ def det_module_subdir(self, ec): if tc_comps is None: # no compiler in toolchain, system toolchain => Core module subdir = CORE + elif tc_comps == [None]: + # no info on toolchain compiler (cfr. 
Cray toolchains), + # then use toolchain name/version + subdir = os.path.join(TOOLCHAIN, ec.toolchain.name, ec.toolchain.version) else: tc_comp_name, tc_comp_ver = self.det_toolchain_compilers_name_version(tc_comps) tc_mpi = det_toolchain_mpi(ec) @@ -223,6 +233,10 @@ def det_modpath_extensions(self, ec): fullver = self.det_full_version(ec) paths.append(os.path.join(MPI, tc_comp_name, tc_comp_ver, ec['name'], fullver)) + # special case for Cray toolchains + elif modclass == MODULECLASS_TOOLCHAIN and tc_comp_info is None and ec.name.startswith('Cray'): + paths.append(os.path.join(TOOLCHAIN, ec.name, ec.version)) + return paths def expand_toolchain_load(self, ec=None): diff --git a/easybuild/tools/modules.py b/easybuild/tools/modules.py index 8a5323434d..9d5f8ce952 100644 --- a/easybuild/tools/modules.py +++ b/easybuild/tools/modules.py @@ -487,7 +487,11 @@ def module_wrapper_exists(self, mod_name, modulerc_fn='.modulerc', mod_wrapper_r """ Determine whether a module wrapper with specified name exists. Only .modulerc file in Tcl syntax is considered here. + DEPRECATED. Use exists() """ + self.log.deprecated('module_wrapper_exists is unreliable and should no longer be used. ' + + 'Use exists instead to check for an existing module or alias.', '5.0') + if mod_wrapper_regex_template is None: mod_wrapper_regex_template = "^[ ]*module-version (?P[^ ]*) %s$" @@ -528,24 +532,40 @@ def module_wrapper_exists(self, mod_name, modulerc_fn='.modulerc', mod_wrapper_r return wrapped_mod - def exist(self, mod_names, mod_exists_regex_template=r'^\s*\S*/%s.*:\s*$', skip_avail=False, maybe_partial=True): + def exist(self, mod_names, mod_exists_regex_template=None, skip_avail=False, maybe_partial=True): """ Check if modules with specified names exists. :param mod_names: list of module names - :param mod_exists_regex_template: template regular expression to search 'module show' output with + :param mod_exists_regex_template: DEPRECATED and unused :param skip_avail: skip checking through 'module avail', only check via 'module show' :param maybe_partial: indicates if the module name may be a partial module name """ + if mod_exists_regex_template is not None: + self.log.deprecated('mod_exists_regex_template is no longer used', '5.0') + def mod_exists_via_show(mod_name): """ Helper function to check whether specified module name exists through 'module show'. :param mod_name: module name """ - mod_exists_regex = mod_exists_regex_template % re.escape(mod_name) - txt = self.show(mod_name) - return bool(re.search(mod_exists_regex, txt, re.M)) + stderr = self.show(mod_name) + res = False + # Parse the output: + # - Skip whitespace + # - Any error -> Module does not exist + # - Check first non-whitespace line for something that looks like an absolute path terminated by a colon + mod_exists_regex = r'\s*/.+:\s*' + for line in stderr.split('\n'): + if OUTPUT_MATCHES['whitespace'].search(line): + continue + if OUTPUT_MATCHES['error'].search(line): + break + if re.match(mod_exists_regex, line): + res = True + break + return res if skip_avail: avail_mod_names = [] @@ -570,15 +590,6 @@ def mod_exists_via_show(mod_name): self.log.debug("checking whether hidden module %s exists via 'show'..." % mod_name) mod_exists = mod_exists_via_show(mod_name) - # if no module file was found, check whether specified module name can be a 'wrapper' module... 
- if not mod_exists: - self.log.debug("Module %s not found via module avail/show, checking whether it is a wrapper", mod_name) - wrapped_mod = self.module_wrapper_exists(mod_name) - if wrapped_mod is not None: - # module wrapper only really exists if the wrapped module file is also available - mod_exists = wrapped_mod in avail_mod_names or mod_exists_via_show(wrapped_mod) - self.log.debug("Result for existence check of wrapped module %s: %s", wrapped_mod, mod_exists) - self.log.debug("Result for existence check of %s module: %s", mod_name, mod_exists) mods_exist.append(mod_exists) @@ -643,7 +654,7 @@ def show(self, mod_name): ans = MODULE_SHOW_CACHE[key] self.log.debug("Found cached result for 'module show %s' with key '%s': %s", mod_name, key, ans) else: - ans = self.run_module('show', mod_name, check_output=False, return_output=True) + ans = self.run_module('show', mod_name, check_output=False, return_stderr=True) MODULE_SHOW_CACHE[key] = ans self.log.debug("Cached result for 'module show %s' with key '%s': %s", mod_name, key, ans) @@ -759,13 +770,15 @@ def run_module(self, *args, **kwargs): # also catch and check exit code exit_code = proc.returncode if kwargs.get('check_exit_code', True) and exit_code != 0: - raise EasyBuildError("Module command 'module %s' failed with exit code %s; stderr: %s; stdout: %s", - ' '.join(cmd_list[2:]), exit_code, stderr, stdout) + raise EasyBuildError("Module command '%s' failed with exit code %s; stderr: %s; stdout: %s", + ' '.join(cmd_list), exit_code, stderr, stdout) if kwargs.get('check_output', True): self.check_module_output(full_cmd, stdout, stderr) - if kwargs.get('return_output', False): + if kwargs.get('return_stderr', False): + return stderr + elif kwargs.get('return_output', False): return stdout + stderr else: # the module command was run with an outdated selected environment variables (see LD_ENV_VAR_KEYS list) @@ -1394,7 +1407,7 @@ def prepend_module_path(self, path, set_mod_paths=True, priority=None): def module_wrapper_exists(self, mod_name): """ Determine whether a module wrapper with specified name exists. - First check for wrapper defined in .modulerc.lua, fall back to also checking .modulerc (Tcl syntax). + DEPRECATED. Use exists() """ res = None @@ -1410,19 +1423,6 @@ def module_wrapper_exists(self, mod_name): return res - def exist(self, mod_names, skip_avail=False, maybe_partial=True): - """ - Check if modules with specified names exists. - - :param mod_names: list of module names - :param skip_avail: skip checking through 'module avail', only check via 'module show' - """ - # module file may be either in Tcl syntax (no file extension) or Lua sytax (.lua extension); - # the current configuration for matters little, since the module may have been installed with a different cfg; - # Lmod may pick up both Tcl and Lua module files, regardless of the EasyBuild configuration - return super(Lmod, self).exist(mod_names, mod_exists_regex_template=r'^\s*\S*/%s.*(\.lua)?:\s*$', - skip_avail=skip_avail, maybe_partial=maybe_partial) - def get_setenv_value_from_modulefile(self, mod_name, var_name): """ Get value for specific 'setenv' statement from module file for the specified module. 
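The reworked existence check above drops the per-tool regex template: `exist` now inspects the stderr of `module show` and treats a module as existing when the first non-blank, non-error line looks like an absolute path to a module file terminated by a colon. A minimal standalone sketch of that decision logic, for illustration only (the regexes below are simplified stand-ins for the OUTPUT_MATCHES patterns used by ModulesTool, not the actual implementation):

import re

# hypothetical stand-ins for the OUTPUT_MATCHES patterns used by ModulesTool
WHITESPACE_REGEX = re.compile(r'^\s*$')
ERROR_REGEX = re.compile(r'error', re.I)

def looks_like_existing_module(show_stderr):
    """Return True if 'module show' stderr starts with '<absolute path to module file>:'."""
    for line in show_stderr.split('\n'):
        if WHITESPACE_REGEX.search(line):
            continue  # skip blank lines
        if ERROR_REGEX.search(line):
            return False  # any error implies the module does not exist
        # first meaningful line: absolute path terminated by a colon => module file was found
        return bool(re.match(r'\s*/.+:\s*', line))
    return False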
diff --git a/easybuild/tools/options.py b/easybuild/tools/options.py index 2a09600f78..ebad095341 100644 --- a/easybuild/tools/options.py +++ b/easybuild/tools/options.py @@ -64,10 +64,10 @@ from easybuild.tools.config import DEFAULT_JOB_BACKEND, DEFAULT_LOGFILE_FORMAT, DEFAULT_MAX_FAIL_RATIO_PERMS from easybuild.tools.config import DEFAULT_MNS, DEFAULT_MODULE_SYNTAX, DEFAULT_MODULES_TOOL, DEFAULT_MODULECLASSES from easybuild.tools.config import DEFAULT_PATH_SUBDIRS, DEFAULT_PKG_RELEASE, DEFAULT_PKG_TOOL, DEFAULT_PKG_TYPE -from easybuild.tools.config import DEFAULT_PNS, DEFAULT_PREFIX, DEFAULT_REPOSITORY, EBROOT_ENV_VAR_ACTIONS, ERROR -from easybuild.tools.config import FORCE_DOWNLOAD_CHOICES, GENERAL_CLASS, IGNORE, JOB_DEPS_TYPE_ABORT_ON_ERROR -from easybuild.tools.config import JOB_DEPS_TYPE_ALWAYS_RUN, LOADED_MODULES_ACTIONS, WARN -from easybuild.tools.config import LOCAL_VAR_NAMING_CHECK_WARN, LOCAL_VAR_NAMING_CHECKS +from easybuild.tools.config import DEFAULT_PNS, DEFAULT_PREFIX, DEFAULT_REPOSITORY, DEFAULT_WAIT_ON_LOCK_INTERVAL +from easybuild.tools.config import DEFAULT_WAIT_ON_LOCK_LIMIT, EBROOT_ENV_VAR_ACTIONS, ERROR, FORCE_DOWNLOAD_CHOICES +from easybuild.tools.config import GENERAL_CLASS, IGNORE, JOB_DEPS_TYPE_ABORT_ON_ERROR, JOB_DEPS_TYPE_ALWAYS_RUN +from easybuild.tools.config import LOADED_MODULES_ACTIONS, LOCAL_VAR_NAMING_CHECK_WARN, LOCAL_VAR_NAMING_CHECKS, WARN from easybuild.tools.config import get_pretend_installpath, init, init_build_options, mk_full_default_path from easybuild.tools.configobj import ConfigObj, ConfigObjError from easybuild.tools.docs import FORMAT_TXT, FORMAT_RST @@ -76,9 +76,8 @@ from easybuild.tools.docs import list_easyblocks, list_toolchains from easybuild.tools.environment import restore_env, unset_env_vars from easybuild.tools.filetools import CHECKSUM_TYPE_SHA256, CHECKSUM_TYPES, install_fake_vsc, move_file, which -from easybuild.tools.github import GITHUB_EB_MAIN, GITHUB_EASYCONFIGS_REPO -from easybuild.tools.github import GITHUB_PR_DIRECTION_DESC, GITHUB_PR_ORDER_CREATED, GITHUB_PR_STATE_OPEN -from easybuild.tools.github import GITHUB_PR_STATES, GITHUB_PR_ORDERS, GITHUB_PR_DIRECTIONS +from easybuild.tools.github import GITHUB_EB_MAIN, GITHUB_PR_DIRECTION_DESC, GITHUB_PR_ORDER_CREATED +from easybuild.tools.github import GITHUB_PR_STATE_OPEN, GITHUB_PR_STATES, GITHUB_PR_ORDERS, GITHUB_PR_DIRECTIONS from easybuild.tools.github import HAVE_GITHUB_API, HAVE_KEYRING, VALID_CLOSE_PR_REASONS from easybuild.tools.github import fetch_easyblocks_from_pr, fetch_github_token from easybuild.tools.hooks import KNOWN_HOOKS @@ -442,8 +441,15 @@ def override_options(self): None, 'store_true', False), 'verify-easyconfig-filenames': ("Verify whether filename of specified easyconfigs matches with contents", None, 'store_true', False), - 'wait-on-lock': ("Wait interval (in seconds) to use when waiting for existing lock to be removed " - "(0: implies no waiting, but exiting with an error)", int, 'store', 0), + 'wait-on-lock': ("Wait for lock to be released; 0 implies no waiting (exit with an error if the lock " + "already exists), a non-zero value specifies the waiting interval [DEPRECATED: " + "use --wait-on-lock-interval and --wait-on-lock-limit instead]", + int, 'store_or_None', None), + 'wait-on-lock-interval': ("Wait interval (in seconds) to use when waiting for existing lock to be removed", + int, 'store', DEFAULT_WAIT_ON_LOCK_INTERVAL), + 'wait-on-lock-limit': ("Maximum amount of time (in seconds) to wait until lock is released (0 means no " + "waiting at all, exit
with error; -1 means no waiting limit, keep waiting)", + int, 'store', DEFAULT_WAIT_ON_LOCK_LIMIT), 'zip-logs': ("Zip logs that are copied to install directory, using specified command", None, 'store_or_None', 'gzip'), diff --git a/easybuild/tools/systemtools.py b/easybuild/tools/systemtools.py index 13e28cc6f3..3b6c209e6f 100644 --- a/easybuild/tools/systemtools.py +++ b/easybuild/tools/systemtools.py @@ -360,7 +360,7 @@ def get_cpu_arch_name(): if HAVE_ARCHSPEC: res = archspec_cpu_host() if res: - cpu_arch_name = res.name + cpu_arch_name = str(res.name) if cpu_arch_name is None: cpu_arch_name = UNKNOWN @@ -770,6 +770,7 @@ def get_system_info(): return { 'core_count': get_avail_core_count(), 'total_memory': get_total_memory(), + 'cpu_arch': get_cpu_architecture(), 'cpu_arch_name': get_cpu_arch_name(), 'cpu_model': get_cpu_model(), 'cpu_speed': get_cpu_speed(), diff --git a/easybuild/tools/testing.py b/easybuild/tools/testing.py index ec7d83ba37..7086c692fb 100644 --- a/easybuild/tools/testing.py +++ b/easybuild/tools/testing.py @@ -268,12 +268,11 @@ def post_easyconfigs_pr_test_report(pr_nr, test_report, msg, init_session_state, if system_info['cpu_arch_name'] != UNKNOWN: system_info['cpu_model'] += " (%s)" % system_info['cpu_arch_name'] - short_system_info = "%(hostname)s - %(os_type)s %(os_name)s %(os_version)s, %(cpu_model)s, Python %(pyver)s" % { + os_info = '%(hostname)s - %(os_type)s %(os_name)s %(os_version)s' % system_info + short_system_info = "%(os_info)s, %(cpu_arch)s, %(cpu_model)s, Python %(pyver)s" % { + 'os_info': os_info, + 'cpu_arch': system_info['cpu_arch'], 'cpu_model': system_info['cpu_model'], - 'hostname': system_info['hostname'], - 'os_name': system_info['os_name'], - 'os_type': system_info['os_type'], - 'os_version': system_info['os_version'], 'pyver': system_info['python_version'].split(' ')[0], } diff --git a/easybuild/tools/toolchain/mpi.py b/easybuild/tools/toolchain/mpi.py index 052c3e061d..abd24e4809 100644 --- a/easybuild/tools/toolchain/mpi.py +++ b/easybuild/tools/toolchain/mpi.py @@ -28,10 +28,12 @@ :author: Stijn De Weirdt (Ghent University) :author: Kenneth Hoste (Ghent University) """ +import copy import os import tempfile from distutils.version import LooseVersion +from easybuild.base import fancylogger import easybuild.tools.environment as env import easybuild.tools.toolchain as toolchain from easybuild.tools.build_log import EasyBuildError @@ -41,6 +43,95 @@ from easybuild.tools.toolchain.toolchain import Toolchain +_log = fancylogger.getLogger('tools.toolchain.mpi', fname=False) + + +def get_mpi_cmd_template(mpi_family, params, mpi_version=None): + """ + Return template for MPI command, for specified MPI family. 
+ + :param mpi_family: MPI family to use to determine MPI command template + """ + + params = copy.deepcopy(params) + + mpi_cmd_template = build_option('mpi_cmd_template') + if mpi_cmd_template: + _log.info("Using specified template for MPI commands: %s", mpi_cmd_template) + else: + # different known mpirun commands + mpirun_n_cmd = "mpirun -n %(nr_ranks)s %(cmd)s" + mpi_cmds = { + toolchain.OPENMPI: mpirun_n_cmd, + toolchain.QLOGICMPI: "mpirun -H localhost -np %(nr_ranks)s %(cmd)s", + toolchain.INTELMPI: mpirun_n_cmd, + toolchain.MVAPICH2: mpirun_n_cmd, + toolchain.MPICH: mpirun_n_cmd, + toolchain.MPICH2: mpirun_n_cmd, + } + + # Intel MPI mpirun needs more work + if mpi_cmd_template is None: + + if mpi_family == toolchain.INTELMPI: + + if mpi_version is None: + raise EasyBuildError("Intel MPI version unknown, can't determine MPI command template!") + + # for old versions of Intel MPI, we need to use MPD + if LooseVersion(mpi_version) <= LooseVersion('4.1'): + + mpi_cmds[toolchain.INTELMPI] = "mpirun %(mpdbf)s %(nodesfile)s -np %(nr_ranks)s %(cmd)s" + + # set temporary dir for MPD + # note: this needs to be kept *short*, + # to avoid mpirun failing with "socket.error: AF_UNIX path too long" + # exact limit is unknown, but ~20 characters seems to be OK + env.setvar('I_MPI_MPD_TMPDIR', tempfile.gettempdir()) + mpd_tmpdir = os.environ['I_MPI_MPD_TMPDIR'] + if len(mpd_tmpdir) > 20: + _log.warning("$I_MPI_MPD_TMPDIR should be (very) short to avoid problems: %s", mpd_tmpdir) + + # temporary location for mpdboot and nodes files + tmpdir = tempfile.mkdtemp(prefix='mpi_cmd_for-') + + # set PBS_ENVIRONMENT, so that --file option for mpdboot isn't stripped away + env.setvar('PBS_ENVIRONMENT', "PBS_BATCH_MPI") + + # make sure we're always using mpd as process manager + # only required for/picked up by Intel MPI v4.1 or higher, no harm done for others + env.setvar('I_MPI_PROCESS_MANAGER', 'mpd') + + # create mpdboot file + mpdboot = os.path.join(tmpdir, 'mpdboot') + write_file(mpdboot, "localhost ifhn=localhost") + + params.update({'mpdbf': "--file=%s" % mpdboot}) + + # create nodes file + nodes = os.path.join(tmpdir, 'nodes') + write_file(nodes, "localhost\n" * int(params['nr_ranks'])) + + params.update({'nodesfile': "-machinefile %s" % nodes}) + + if mpi_family in mpi_cmds: + mpi_cmd_template = mpi_cmds[mpi_family] + _log.info("Using template MPI command '%s' for MPI family '%s'", mpi_cmd_template, mpi_family) + else: + raise EasyBuildError("Don't know which template MPI command to use for MPI family '%s'", mpi_family) + + missing = [] + for key in sorted(params.keys()): + tmpl = '%(' + key + ')s' + if tmpl not in mpi_cmd_template: + missing.append(tmpl) + if missing: + raise EasyBuildError("Missing templates in mpi-cmd-template value '%s': %s", + mpi_cmd_template, ', '.join(missing)) + + return mpi_cmd_template, params + + class Mpi(Toolchain): """General MPI-like class can't be used without creating new class M(Mpi) @@ -151,7 +242,8 @@ def _set_mpi_variables(self): if not self.options.get('32bit', None): suffix = '64' - for root in self.get_software_root(self.MPI_MODULE_NAME): + # take into account that MPI_MODULE_NAME could be None (see Cray toolchains) + for root in self.get_software_root(self.MPI_MODULE_NAME or []): self.variables.append_exists('MPI_LIB_STATIC', root, lib_dir, filename="lib%s.a" % self.MPI_LIBRARY_NAME, suffix=suffix) self.variables.append_exists('MPI_LIB_SHARED', root, lib_dir, filename="lib%s.so" % self.MPI_LIBRARY_NAME, @@ -191,79 +283,15 @@ def mpi_cmd_for(self, cmd, 
nr_ranks): 'cmd': cmd, } - mpi_cmd_template = build_option('mpi_cmd_template') - if mpi_cmd_template: - self.log.info("Using specified template for MPI commands: %s", mpi_cmd_template) - else: - # different known mpirun commands - mpirun_n_cmd = "mpirun -n %(nr_ranks)s %(cmd)s" - mpi_cmds = { - toolchain.OPENMPI: mpirun_n_cmd, - toolchain.QLOGICMPI: "mpirun -H localhost -np %(nr_ranks)s %(cmd)s", - toolchain.INTELMPI: mpirun_n_cmd, - toolchain.MVAPICH2: mpirun_n_cmd, - toolchain.MPICH: mpirun_n_cmd, - toolchain.MPICH2: mpirun_n_cmd, - } - mpi_family = self.mpi_family() - # Intel MPI mpirun needs more work - if mpi_cmd_template is None: - - if mpi_family == toolchain.INTELMPI: - - # for old versions of Intel MPI, we need to use MPD - impi_ver = self.get_software_version(self.MPI_MODULE_NAME)[0] - if LooseVersion(impi_ver) <= LooseVersion('4.1'): - - mpi_cmds[toolchain.INTELMPI] = "mpirun %(mpdbf)s %(nodesfile)s -np %(nr_ranks)s %(cmd)s" - - # set temporary dir for MPD - # note: this needs to be kept *short*, - # to avoid mpirun failing with "socket.error: AF_UNIX path too long" - # exact limit is unknown, but ~20 characters seems to be OK - env.setvar('I_MPI_MPD_TMPDIR', tempfile.gettempdir()) - mpd_tmpdir = os.environ['I_MPI_MPD_TMPDIR'] - if len(mpd_tmpdir) > 20: - self.log.warning("$I_MPI_MPD_TMPDIR should be (very) short to avoid problems: %s", mpd_tmpdir) - - # temporary location for mpdboot and nodes files - tmpdir = tempfile.mkdtemp(prefix='mpi_cmd_for-') - - # set PBS_ENVIRONMENT, so that --file option for mpdboot isn't stripped away - env.setvar('PBS_ENVIRONMENT', "PBS_BATCH_MPI") - - # make sure we're always using mpd as process manager - # only required for/picked up by Intel MPI v4.1 or higher, no harm done for others - env.setvar('I_MPI_PROCESS_MANAGER', 'mpd') - - # create mpdboot file - mpdboot = os.path.join(tmpdir, 'mpdboot') - write_file(mpdboot, "localhost ifhn=localhost") - - params.update({'mpdbf': "--file=%s" % mpdboot}) - - # create nodes file - nodes = os.path.join(tmpdir, 'nodes') - write_file(nodes, "localhost\n" * int(nr_ranks)) - - params.update({'nodesfile': "-machinefile %s" % nodes}) - - if mpi_family in mpi_cmds.keys(): - mpi_cmd_template = mpi_cmds[mpi_family] - self.log.info("Using template MPI command '%s' for MPI family '%s'", mpi_cmd_template, mpi_family) - else: - raise EasyBuildError("Don't know which template MPI command to use for MPI family '%s'", mpi_family) + if mpi_family == toolchain.INTELMPI: + mpi_version = self.get_software_version(self.MPI_MODULE_NAME)[0] + else: + mpi_version = None - missing = [] - for key in sorted(params.keys()): - tmpl = '%(' + key + ')s' - if tmpl not in mpi_cmd_template: - missing.append(tmpl) - if missing: - raise EasyBuildError("Missing templates in mpi-cmd-template value '%s': %s", - mpi_cmd_template, ', '.join(missing)) + mpi_cmd_template, params = get_mpi_cmd_template(mpi_family, params, mpi_version=mpi_version) + self.log.info("Using MPI command template '%s' (params: %s)", mpi_cmd_template, params) try: res = mpi_cmd_template % params diff --git a/easybuild/tools/version.py b/easybuild/tools/version.py index 79824ff42b..540aa76b82 100644 --- a/easybuild/tools/version.py +++ b/easybuild/tools/version.py @@ -43,7 +43,7 @@ # recent setuptools versions will *TRANSFORM* something like 'X.Y.Zdev' into 'X.Y.Z.dev0', with a warning like # UserWarning: Normalizing '2.4.0dev' to '2.4.0.dev0' # This causes problems further up the dependency chain... 
-VERSION = LooseVersion('4.2.0') +VERSION = LooseVersion('4.2.1.dev0') UNKNOWN = 'UNKNOWN' diff --git a/test/framework/easyblock.py b/test/framework/easyblock.py index 25e9789d14..53eeb4c331 100644 --- a/test/framework/easyblock.py +++ b/test/framework/easyblock.py @@ -46,13 +46,14 @@ from easybuild.tools import config from easybuild.tools.build_log import EasyBuildError from easybuild.tools.config import get_module_syntax -from easybuild.tools.filetools import copy_dir, copy_file, mkdir, read_file, remove_file, write_file +from easybuild.tools.filetools import change_dir, copy_dir, copy_file, mkdir, read_file, remove_file, write_file from easybuild.tools.module_generator import module_generator from easybuild.tools.modules import reset_module_caches from easybuild.tools.utilities import time2str from easybuild.tools.version import get_git_revision, this_is_easybuild from easybuild.tools.py2vs3 import string_type + class EasyBlockTest(EnhancedTestCase): """ Baseclass for easyblock testcases """ @@ -515,6 +516,48 @@ def test_make_module_extra(self): for pattern in patterns: self.assertTrue(re.search(pattern, txt, re.M), "Pattern '%s' found in: %s" % (pattern, txt)) + def test_make_module_deppaths(self): + """Test for make_module_deppaths""" + init_config(build_options={'silent': True}) + + self.contents = '\n'.join([ + 'easyblock = "ConfigureMake"', + 'name = "pi"', + 'version = "3.14"', + 'homepage = "http://example.com"', + 'description = "test easyconfig"', + "toolchain = {'name': 'gompi', 'version': '2018a'}", + 'moddependpaths = "/path/to/mods"', + 'dependencies = [', + " ('FFTW', '3.3.7'),", + ']', + ]) + self.writeEC() + eb = EasyBlock(EasyConfig(self.eb_file)) + + eb.installdir = os.path.join(config.install_path(), 'pi', '3.14') + eb.check_readiness_step() + eb.make_builddir() + eb.prepare_step() + + if get_module_syntax() == 'Tcl': + use_load = '\n'.join([ + 'if { [ file isdirectory "/path/to/mods" ] } {', + ' module use "/path/to/mods"', + '}', + ]) + elif get_module_syntax() == 'Lua': + use_load = '\n'.join([ + 'if isDir("/path/to/mods") then', + ' prepend_path("MODULEPATH", "/path/to/mods")', + 'end', + ]) + else: + self.assertTrue(False, "Unknown module syntax: %s" % get_module_syntax()) + + expected = use_load + self.assertEqual(eb.make_module_deppaths().strip(), expected) + def test_make_module_dep(self): """Test for make_module_dep""" init_config(build_options={'silent': True}) @@ -1567,8 +1610,13 @@ def test_guess_start_dir(self): test_easyconfigs = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'easyconfigs', 'test_ecs') ec = process_easyconfig(os.path.join(test_easyconfigs, 't', 'toy', 'toy-0.0.eb'))[0] + cwd = os.getcwd() + self.assertTrue(os.path.exists(cwd)) + def check_start_dir(expected_start_dir): """Check start dir.""" + # make sure we're in an existing directory at the start + change_dir(cwd) eb = EasyBlock(ec['ec']) eb.silent = True eb.cfg['stop'] = 'patch' @@ -1928,6 +1976,73 @@ def test_time2str(self): error_pattern = "Incorrect value type provided to time2str, should be datetime.timedelta: <.* 'int'>" self.assertErrorRegex(EasyBuildError, error_pattern, time2str, 123) + def test_sanity_check_paths_verification(self): + """Test verification of sanity_check_paths w.r.t. 
keys & values.""" + + testdir = os.path.abspath(os.path.dirname(__file__)) + toy_ec = os.path.join(testdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb') + eb = EasyBlock(EasyConfig(toy_ec)) + eb.dry_run = True + + error_pattern = r"Incorrect format for sanity_check_paths: " + error_pattern += r"should \(only\) have 'dirs', 'files' keys, " + error_pattern += r"values should be lists \(at least one non-empty\)." + + def run_sanity_check_step(sanity_check_paths, enhance_sanity_check): + """Helper function to run sanity check step, and do trivial check on generated output.""" + self.mock_stderr(True) + self.mock_stdout(True) + eb.cfg['sanity_check_paths'] = sanity_check_paths + eb.cfg['enhance_sanity_check'] = enhance_sanity_check + eb.sanity_check_step() + stderr, stdout = self.get_stderr(), self.get_stdout() + self.mock_stderr(False) + self.mock_stdout(False) + self.assertFalse(stderr) + self.assertTrue(stdout.startswith("Sanity check paths")) + + # partial sanity_check_paths, only allowed when using enhance_sanity_check + test_cases = [ + {'dirs': ['foo']}, + {'files': ['bar']}, + {'dirs': []}, + {'files': []}, + {'files': [], 'dirs': []}, + ] + for test_case in test_cases: + # without enhanced sanity check, these are all invalid sanity_check_paths values + self.assertErrorRegex(EasyBuildError, error_pattern, run_sanity_check_step, test_case, False) + + # if enhance_sanity_check is enabled, these are acceptable sanity_check_step values + run_sanity_check_step(test_case, True) + + # some inputs are always invalid, regardless of enhance_sanity_check, due to wrong keys/values + test_cases = [ + {'foo': ['bar']}, + {'files': ['foo'], 'dirs': [], 'libs': ['libfoo.a']}, + {'files': ['foo'], 'libs': ['libfoo.a']}, + {'dirs': [], 'libs': ['libfoo.a']}, + ] + for test_case in test_cases: + self.assertErrorRegex(EasyBuildError, error_pattern, run_sanity_check_step, test_case, False) + self.assertErrorRegex(EasyBuildError, error_pattern, run_sanity_check_step, test_case, True) + + # non-list values yield different errors with/without enhance_sanity_check + error_pattern_bis = r"Incorrect value type in sanity_check_paths, should be a list: .*" + test_cases = [ + {'files': 123, 'dirs': []}, + {'files': [], 'dirs': 123}, + {'files': 'foo', 'dirs': []}, + {'files': [], 'dirs': 'foo'}, + ] + for test_case in test_cases: + self.assertErrorRegex(EasyBuildError, error_pattern, run_sanity_check_step, test_case, False) + self.assertErrorRegex(EasyBuildError, error_pattern_bis, run_sanity_check_step, test_case, True) + + # empty sanity_check_paths is always OK, since then the fallback to default bin + lib/lib64 kicks in + run_sanity_check_step({}, False) + run_sanity_check_step({}, True) + def suite(): """ return all the tests in this file """ diff --git a/test/framework/easyconfigparser.py b/test/framework/easyconfigparser.py index d684950974..be327d51de 100644 --- a/test/framework/easyconfigparser.py +++ b/test/framework/easyconfigparser.py @@ -180,11 +180,13 @@ def test_easyconfig_constants(self): system_constant = constants.pop('SYSTEM') self.assertEqual(system_constant, {'name': 'system', 'version': 'system'}) - # make sure both keys and values are only strings + # make sure both keys and values are of appropriate types for constant_name in constants: self.assertTrue(isinstance(constant_name, string_type), "Constant name %s is a string" % constant_name) val = constants[constant_name] - self.assertTrue(isinstance(val, (string_type, dict)), "Constant value %s is a string or dict" % val) + fail_msg = 
"The constant %s should have an acceptable type, found %s (%s)" % (constant_name, + type(val), str(val)) + self.assertTrue(isinstance(val, (string_type, dict, tuple)), fail_msg) # check a couple of randomly picked constant values self.assertEqual(constants['SOURCE_TAR_GZ'], '%(name)s-%(version)s.tar.gz') diff --git a/test/framework/easyconfigs/test_ecs/h/HPL/HPL-2.1-CrayCCE-5.1.29.eb b/test/framework/easyconfigs/test_ecs/h/HPL/HPL-2.1-CrayCCE-5.1.29.eb new file mode 100644 index 0000000000..607821faf2 --- /dev/null +++ b/test/framework/easyconfigs/test_ecs/h/HPL/HPL-2.1-CrayCCE-5.1.29.eb @@ -0,0 +1,14 @@ +easyblock = 'ConfigureMake' + +name = 'HPL' +version = '2.1' + +homepage = 'http://www.netlib.org/benchmark/hpl/' +description = "HPL, you know, LINPACK" + +toolchain = {'name': 'CrayCCE', 'version': '5.1.29'} + +source_urls = ['http://www.netlib.org/benchmark/%(namelower)s'] +sources = [SOURCELOWER_TAR_GZ] + +moduleclass = 'tools' diff --git a/test/framework/filetools.py b/test/framework/filetools.py index 6595a7884d..fd2e4e5c68 100644 --- a/test/framework/filetools.py +++ b/test/framework/filetools.py @@ -29,6 +29,7 @@ @author: Kenneth Hoste (Ghent University) @author: Stijn De Weirdt (Ghent University) @author: Ward Poelmans (Ghent University) +@author: Maxime Boissonneault (Compute Canada, Universite Laval) """ import datetime import glob @@ -148,6 +149,29 @@ def test_find_base_dir(self): os.chdir(tmpdir) self.assertTrue(os.path.samefile(foodir, ft.find_base_dir())) + def test_find_glob_pattern(self): + """test find_glob_pattern function""" + tmpdir = tempfile.mkdtemp() + os.mkdir(os.path.join(tmpdir, 'python2.7')) + os.mkdir(os.path.join(tmpdir, 'python2.7', 'include')) + os.mkdir(os.path.join(tmpdir, 'python3.5m')) + os.mkdir(os.path.join(tmpdir, 'python3.5m', 'include')) + + self.assertEqual(ft.find_glob_pattern(os.path.join(tmpdir, 'python2.7*')), + os.path.join(tmpdir, 'python2.7')) + self.assertEqual(ft.find_glob_pattern(os.path.join(tmpdir, 'python2.7*', 'include')), + os.path.join(tmpdir, 'python2.7', 'include')) + self.assertEqual(ft.find_glob_pattern(os.path.join(tmpdir, 'python3.5*')), + os.path.join(tmpdir, 'python3.5m')) + self.assertEqual(ft.find_glob_pattern(os.path.join(tmpdir, 'python3.5*', 'include')), + os.path.join(tmpdir, 'python3.5m', 'include')) + self.assertEqual(ft.find_glob_pattern(os.path.join(tmpdir, 'python3.6*'), False), None) + self.assertErrorRegex(EasyBuildError, "Was expecting exactly", ft.find_glob_pattern, + os.path.join(tmpdir, 'python3.6*')) + self.assertErrorRegex(EasyBuildError, "Was expecting exactly", ft.find_glob_pattern, + os.path.join(tmpdir, 'python*')) + + def test_encode_class_name(self): """Test encoding of class names.""" for (class_name, encoded_class_name) in self.class_names: @@ -1292,8 +1316,8 @@ def test_pypi_source_urls(self): eb340_url += 'easybuild-3.4.0.tar.gz#md5=267a056a77a8f77fccfbf56354364045' self.assertTrue(eb340_url, res) pattern = '^https://pypi.python.org/packages/[a-f0-9]{2}/[a-f0-9]{2}/[a-f0-9]{60}/' - pattern_md5 = pattern + 'easybuild-[0-9rc.]+.tar.gz#md5=[a-f0-9]{32}$' - pattern_sha256 = pattern + 'easybuild-[0-9rc.]+.tar.gz#sha256=[a-f0-9]{64}$' + pattern_md5 = pattern + 'easybuild-[0-9a-z.]+.tar.gz#md5=[a-f0-9]{32}$' + pattern_sha256 = pattern + 'easybuild-[0-9a-z.]+.tar.gz#sha256=[a-f0-9]{64}$' regex_md5 = re.compile(pattern_md5) regex_sha256 = re.compile(pattern_sha256) for url in res: @@ -1303,6 +1327,15 @@ def test_pypi_source_urls(self): # more than 50 releases at time of writing test, which always stay 
there self.assertTrue(len(res) > 50) + # check for Python package that has yanked releases, + # see https://github.com/easybuilders/easybuild-framework/issues/3301 + res = ft.pypi_source_urls('ipython') + self.assertTrue(isinstance(res, list) and res) + prefix = 'https://pypi.python.org/packages' + for entry in res: + self.assertTrue(entry.startswith(prefix), "'%s' should start with '%s'" % (entry, prefix)) + self.assertTrue('ipython' in entry, "Pattern 'ipython' should be found in '%s'" % entry) + def test_derive_alt_pypi_url(self): """Test derive_alt_pypi_url() function.""" url = 'https://pypi.python.org/packages/source/e/easybuild/easybuild-2.7.0.tar.gz' @@ -1327,7 +1360,8 @@ def test_apply_patch(self): """ Test apply_patch """ testdir = os.path.dirname(os.path.abspath(__file__)) tmpdir = self.test_prefix - path = ft.extract_file(os.path.join(testdir, 'sandbox', 'sources', 'toy', 'toy-0.0.tar.gz'), tmpdir) + toy_tar_gz = os.path.join(testdir, 'sandbox', 'sources', 'toy', 'toy-0.0.tar.gz') + path = ft.extract_file(toy_tar_gz, tmpdir, change_into_dir=False) toy_patch_fn = 'toy-0.0_fix-silly-typo-in-printf-statement.patch' toy_patch = os.path.join(testdir, 'sandbox', 'sources', 'toy', toy_patch_fn) @@ -1475,22 +1509,45 @@ def test_copy_dir(self): ft.copy_dir(to_copy, testdir, dirs_exist_ok=True) self.assertTrue(sorted(os.listdir(to_copy)) == sorted(os.listdir(testdir))) - # if the directory already exists and 'dirs_exist_ok' is True and there is another named argument (ignore) - # we expect clean error on Python < 3.8 and pass the test on Python >= 3.8 - # NOTE: reused ignore from previous test + # check whether use of 'ignore' works if target path already exists and 'dirs_exist_ok' is enabled def ignore_func(_, names): return [x for x in names if '6.4.0-2.28' in x] shutil.rmtree(testdir) ft.mkdir(testdir) - if sys.version_info >= (3, 8): - ft.copy_dir(to_copy, testdir, dirs_exist_ok=True, ignore=ignore_func) - self.assertEqual(sorted(os.listdir(testdir)), expected) - self.assertFalse(os.path.exists(os.path.join(testdir, 'GCC-6.4.0-2.28.eb'))) - else: - error_pattern = "Unknown named arguments passed to copy_dir with dirs_exist_ok=True: ignore" - self.assertErrorRegex(EasyBuildError, error_pattern, ft.copy_dir, to_copy, testdir, - dirs_exist_ok=True, ignore=ignore_func) + ft.copy_dir(to_copy, testdir, dirs_exist_ok=True, ignore=ignore_func) + self.assertEqual(sorted(os.listdir(testdir)), expected) + self.assertFalse(os.path.exists(os.path.join(testdir, 'GCC-6.4.0-2.28.eb'))) + + # test copy_dir when broken symlinks are involved + srcdir = os.path.join(self.test_prefix, 'topdir_to_copy') + ft.mkdir(srcdir) + ft.write_file(os.path.join(srcdir, 'test.txt'), '123') + subdir = os.path.join(srcdir, 'subdir') + # introduce broken file symlink + foo_txt = os.path.join(subdir, 'foo.txt') + ft.write_file(foo_txt, 'bar') + ft.symlink(foo_txt, os.path.join(subdir, 'bar.txt')) + ft.remove_file(foo_txt) + # introduce broken dir symlink + subdir_tmp = os.path.join(srcdir, 'subdir_tmp') + ft.mkdir(subdir_tmp) + ft.symlink(subdir_tmp, os.path.join(srcdir, 'subdir_link')) + ft.remove_dir(subdir_tmp) + + target_dir = os.path.join(self.test_prefix, 'target_to_copy_to') + + # trying this without symlinks=True ends in tears, because bar.txt points to a non-existing file + self.assertErrorRegex(EasyBuildError, "Failed to copy directory", ft.copy_dir, srcdir, target_dir) + ft.remove_dir(target_dir) + + ft.copy_dir(srcdir, target_dir, symlinks=True) + + # copying directory with broken symlinks 
should also work if target directory already exists + ft.remove_dir(target_dir) + ft.mkdir(target_dir) + ft.mkdir(subdir) + ft.copy_dir(srcdir, target_dir, symlinks=True, dirs_exist_ok=True) # also test behaviour of copy_file under --dry-run build_options = { @@ -1509,7 +1566,7 @@ def ignore_func(_, names): self.mock_stdout(False) self.assertFalse(os.path.exists(target_dir)) - self.assertTrue(re.search("^copied directory .*/GCC to .*/GCC", txt)) + self.assertTrue(re.search("^copied directory .*/GCC to .*/%s" % os.path.basename(target_dir), txt)) # forced copy, even in dry run mode self.mock_stdout(True) @@ -1597,19 +1654,24 @@ def test_change_dir(self): def test_extract_file(self): """Test extract_file""" + cwd = os.getcwd() + testdir = os.path.dirname(os.path.abspath(__file__)) toy_tarball = os.path.join(testdir, 'sandbox', 'sources', 'toy', 'toy-0.0.tar.gz') self.assertFalse(os.path.exists(os.path.join(self.test_prefix, 'toy-0.0', 'toy.source'))) - path = ft.extract_file(toy_tarball, self.test_prefix) + path = ft.extract_file(toy_tarball, self.test_prefix, change_into_dir=False) self.assertTrue(os.path.exists(os.path.join(self.test_prefix, 'toy-0.0', 'toy.source'))) self.assertTrue(os.path.samefile(path, self.test_prefix)) + # still in same directory as before if change_into_dir is set to False + self.assertTrue(os.path.samefile(os.getcwd(), cwd)) shutil.rmtree(os.path.join(path, 'toy-0.0')) toy_tarball_renamed = os.path.join(self.test_prefix, 'toy_tarball') shutil.copyfile(toy_tarball, toy_tarball_renamed) - path = ft.extract_file(toy_tarball_renamed, self.test_prefix, cmd="tar xfvz %s") + path = ft.extract_file(toy_tarball_renamed, self.test_prefix, cmd="tar xfvz %s", change_into_dir=False) + self.assertTrue(os.path.samefile(os.getcwd(), cwd)) self.assertTrue(os.path.exists(os.path.join(self.test_prefix, 'toy-0.0', 'toy.source'))) self.assertTrue(os.path.samefile(path, self.test_prefix)) shutil.rmtree(os.path.join(path, 'toy-0.0')) @@ -1622,17 +1684,56 @@ def test_extract_file(self): init_config(build_options=build_options) self.mock_stdout(True) - path = ft.extract_file(toy_tarball, self.test_prefix) + path = ft.extract_file(toy_tarball, self.test_prefix, change_into_dir=False) txt = self.get_stdout() self.mock_stdout(False) + self.assertTrue(os.path.samefile(os.getcwd(), cwd)) self.assertTrue(os.path.samefile(path, self.test_prefix)) self.assertFalse(os.path.exists(os.path.join(self.test_prefix, 'toy-0.0'))) self.assertTrue(re.search('running command "tar xzf .*/toy-0.0.tar.gz"', txt)) - path = ft.extract_file(toy_tarball, self.test_prefix, forced=True) + path = ft.extract_file(toy_tarball, self.test_prefix, forced=True, change_into_dir=False) self.assertTrue(os.path.exists(os.path.join(self.test_prefix, 'toy-0.0', 'toy.source'))) self.assertTrue(os.path.samefile(path, self.test_prefix)) + self.assertTrue(os.path.samefile(os.getcwd(), cwd)) + + build_options['extended_dry_run'] = False + init_config(build_options=build_options) + + ft.remove_dir(os.path.join(self.test_prefix, 'toy-0.0')) + + # a deprecation warning is printed (which is an error in this context) + # if the 'change_into_dir' named argument was left unspecified + error_pattern = "extract_file function was called without specifying value for change_into_dir" + self.assertErrorRegex(EasyBuildError, error_pattern, ft.extract_file, toy_tarball, self.test_prefix) + self.allow_deprecated_behaviour() + + # make sure we're not in self.test_prefix now (checks below assumes so) + self.assertFalse(os.path.samefile(os.getcwd(), 
self.test_prefix)) + + # by default, extract_file changes to directory in which source file was unpacked + self.mock_stderr(True) + path = ft.extract_file(toy_tarball, self.test_prefix) + stderr = self.get_stderr().strip() + self.mock_stderr(False) + self.assertTrue(os.path.samefile(path, self.test_prefix)) + self.assertTrue(os.path.samefile(os.getcwd(), self.test_prefix)) + regex = re.compile("^WARNING: .*extract_file function was called without specifying value for change_into_dir") + self.assertTrue(regex.search(stderr), "Pattern '%s' found in: %s" % (regex.pattern, stderr)) + + ft.change_dir(cwd) + self.assertFalse(os.path.samefile(os.getcwd(), self.test_prefix)) + + # no deprecation warning when change_into_dir is set to True + self.mock_stderr(True) + path = ft.extract_file(toy_tarball, self.test_prefix, change_into_dir=True) + stderr = self.get_stderr().strip() + self.mock_stderr(False) + + self.assertTrue(os.path.samefile(path, self.test_prefix)) + self.assertTrue(os.path.samefile(os.getcwd(), self.test_prefix)) + self.assertFalse(stderr) def test_remove(self): """Test remove_file, remove_dir and join remove functions.""" @@ -1715,7 +1816,7 @@ def test_index_functions(self): # test with specified path with and without trailing '/'s for path in [test_ecs, test_ecs + '/', test_ecs + '//']: index = ft.create_index(path) - self.assertEqual(len(index), 81) + self.assertEqual(len(index), 82) expected = [ os.path.join('b', 'bzip2', 'bzip2-1.0.6-GCC-4.9.2.eb'), @@ -1926,19 +2027,35 @@ def makedirs_in_test(*paths): def test_find_eb_script(self): """Test find_eb_script function.""" + + # make sure $EB_SCRIPT_PATH is not set already (used as fallback mechanism in find_eb_script) + if 'EB_SCRIPT_PATH' in os.environ: + del os.environ['EB_SCRIPT_PATH'] + self.assertTrue(os.path.exists(ft.find_eb_script('rpath_args.py'))) self.assertTrue(os.path.exists(ft.find_eb_script('rpath_wrapper_template.sh.in'))) self.assertErrorRegex(EasyBuildError, "Script 'no_such_script' not found", ft.find_eb_script, 'no_such_script') # put test script in place relative to location of 'eb' - ft.write_file(os.path.join(self.test_prefix, 'bin', 'eb'), '#!/bin/bash\necho "fake eb"') - ft.adjust_permissions(os.path.join(self.test_prefix, 'bin', 'eb'), stat.S_IXUSR) - os.environ['PATH'] = '%s:%s' % (os.path.join(self.test_prefix, 'bin'), os.getenv('PATH', '')) + fake_eb = os.path.join(self.test_prefix, 'bin', 'eb') + ft.write_file(fake_eb, '#!/bin/bash\necho "fake eb"') + ft.adjust_permissions(fake_eb, stat.S_IXUSR) + os.environ['PATH'] = '%s:%s' % (os.path.dirname(fake_eb), os.getenv('PATH', '')) - justatest = os.path.join(self.test_prefix, 'easybuild', 'scripts', 'justatest.sh') + justatest = os.path.join(self.test_prefix, 'easybuild', 'scripts', 'thisisjustatestscript.sh') ft.write_file(justatest, '#!/bin/bash') - self.assertTrue(os.path.samefile(ft.find_eb_script('justatest.sh'), justatest)) + self.assertTrue(os.path.samefile(ft.find_eb_script('thisisjustatestscript.sh'), justatest)) + + # $EB_SCRIPT_PATH can also be used (overrules 'eb' found via $PATH) + ft.remove_file(fake_eb) + os.environ['EB_SCRIPT_PATH'] = os.path.join(self.test_prefix, 'easybuild', 'scripts') + self.assertTrue(os.path.samefile(ft.find_eb_script('thisisjustatestscript.sh'), justatest)) + + # if script can't be found via either $EB_SCRIPT_PATH or location of 'eb', we get a clean error + del os.environ['EB_SCRIPT_PATH'] + error_pattern = "Script 'thisisjustatestscript.sh' not found at expected location" + self.assertErrorRegex(EasyBuildError, 
error_pattern, ft.find_eb_script, 'thisisjustatestscript.sh') def test_move_file(self): """Test move_file function""" @@ -2399,6 +2516,97 @@ def test_copy_framework_files(self): self.assertEqual(res['new'], expected_new) + def test_locks(self): + """Tests for lock-related functions.""" + + init_config(build_options={'silent': True}) + + # make sure that global list of locks is empty when we start off + self.assertFalse(ft.global_lock_names) + + # use a realistic lock name (cfr. EasyBlock.run_all_steps) + installdir = os.path.join(self.test_installpath, 'software', 'test', '1.2.3-foss-2019b-Python-3.7.4') + lock_name = installdir.replace('/', '_') + + # det_lock_path returns full path to lock with specified name + # (used internally by create_lock, check_lock, remove_lock) + lock_path = ft.det_lock_path(lock_name) + self.assertFalse(os.path.exists(lock_path)) + + locks_dir = os.path.dirname(lock_path) + self.assertFalse(os.path.exists(locks_dir)) + + # if lock doesn't exist yet, check_lock just returns + ft.check_lock(lock_name) + + # create lock, and check whether it actually was created + ft.create_lock(lock_name) + self.assertTrue(os.path.exists(lock_path)) + + # can't use os.path.samefile until locks_dir actually exists + self.assertTrue(os.path.samefile(locks_dir, os.path.join(self.test_installpath, 'software', '.locks'))) + + self.assertEqual(os.listdir(locks_dir), [lock_name + '.lock']) + + # if lock exists, then check_lock raises an error + self.assertErrorRegex(EasyBuildError, "Lock .* already exists", ft.check_lock, lock_name) + + # remove_lock should... remove the lock + ft.remove_lock(lock_name) + self.assertFalse(os.path.exists(lock_path)) + self.assertEqual(os.listdir(locks_dir), []) + + # no harm done if remove_lock is called if lock is already gone + ft.remove_lock(lock_name) + + # check_lock just returns again after lock is removed + ft.check_lock(lock_name) + + # global list of locks should be empty at this point + self.assertFalse(ft.global_lock_names) + + # calling clean_up_locks when there are no locks should not cause trouble + ft.clean_up_locks() + + ft.create_lock(lock_name) + self.assertEqual(ft.global_lock_names, set([lock_name])) + self.assertEqual(os.listdir(locks_dir), [lock_name + '.lock']) + + ft.clean_up_locks() + self.assertFalse(ft.global_lock_names) + self.assertFalse(os.path.exists(lock_path)) + self.assertEqual(os.listdir(locks_dir), []) + + # no problem with multiple locks + lock_names = [lock_name, 'test123', 'foo@bar%baz'] + lock_paths = [os.path.join(locks_dir, x + '.lock') for x in lock_names] + for ln in lock_names: + ft.create_lock(ln) + for lp in lock_paths: + self.assertTrue(os.path.exists(lp), "Path %s should exist" % lp) + + self.assertEqual(ft.global_lock_names, set(lock_names)) + expected_locks = sorted(ln + '.lock' for ln in lock_names) + self.assertEqual(sorted(os.listdir(locks_dir)), expected_locks) + + ft.clean_up_locks() + for lp in lock_paths: + self.assertFalse(os.path.exists(lp), "Path %s should not exist" % lp) + self.assertFalse(ft.global_lock_names) + self.assertEqual(os.listdir(locks_dir), []) + + # also test signal handler that is supposed to clean up locks + ft.create_lock(lock_name) + self.assertTrue(ft.global_lock_names) + self.assertTrue(os.path.exists(lock_path)) + self.assertEqual(os.listdir(locks_dir), [lock_name + '.lock']) + + # clean_up_locks_signal_handler causes sys.exit with specified exit code + self.assertErrorRegex(SystemExit, '15', ft.clean_up_locks_signal_handler, 15, None) + 
self.assertFalse(ft.global_lock_names) + self.assertFalse(os.path.exists(lock_path)) + self.assertEqual(os.listdir(locks_dir), []) + def suite(): """ returns all the testcases in this module """ diff --git a/test/framework/module_generator.py b/test/framework/module_generator.py index 81ce794218..439091a3a3 100644 --- a/test/framework/module_generator.py +++ b/test/framework/module_generator.py @@ -1273,6 +1273,11 @@ def test_ec(ecfile, short_modname, mod_subdir, modpath_exts, user_modpath_exts, ['MPI/intel-CUDA/%s-5.5.22/impi/5.1.2.150' % iccver], ['MPI/intel-CUDA/%s-5.5.22/impi/5.1.2.150' % iccver], ['Core']), + 'CrayCCE-5.1.29.eb': ('CrayCCE/5.1.29', 'Core', + ['Toolchain/CrayCCE/5.1.29'], + ['Toolchain/CrayCCE/5.1.29'], + ['Core']), + 'HPL-2.1-CrayCCE-5.1.29.eb': ('HPL/2.1', 'Toolchain/CrayCCE/5.1.29', [], [], ['Core']), } for ecfile, mns_vals in test_ecs.items(): test_ec(ecfile, *mns_vals) diff --git a/test/framework/modules.py b/test/framework/modules.py index 93015b4a07..808eb0df76 100644 --- a/test/framework/modules.py +++ b/test/framework/modules.py @@ -44,6 +44,7 @@ from easybuild.framework.easyblock import EasyBlock from easybuild.framework.easyconfig.easyconfig import EasyConfig from easybuild.tools.build_log import EasyBuildError +from easybuild.tools.environment import modify_env from easybuild.tools.filetools import adjust_permissions, copy_file, copy_dir, mkdir from easybuild.tools.filetools import read_file, remove_dir, remove_file, symlink, write_file from easybuild.tools.modules import EnvironmentModules, EnvironmentModulesC, EnvironmentModulesTcl, Lmod, NoModulesTool @@ -92,6 +93,104 @@ def test_long_module_path(self): shutil.rmtree(tmpdir) + def test_run_module(self): + """Test for ModulesTool.run_module method.""" + + testdir = os.path.dirname(os.path.abspath(__file__)) + + for key in ['EBROOTGCC', 'EBROOTOPENMPI', 'EBROOTOPENBLAS']: + if key in os.environ: + del os.environ[key] + + # arguments can be passed in two ways: multiple arguments, or just 1 list argument + self.modtool.run_module('load', 'GCC/6.4.0-2.28') + self.assertEqual(os.environ['EBROOTGCC'], '/prefix/software/GCC/6.4.0-2.28') + + # restore original environment + modify_env(os.environ, self.orig_environ, verbose=False) + self.reset_modulepath([os.path.join(testdir, 'modules')]) + + self.assertFalse('EBROOTGCC' in os.environ) + self.modtool.run_module(['load', 'GCC/6.4.0-2.28']) + self.assertEqual(os.environ['EBROOTGCC'], '/prefix/software/GCC/6.4.0-2.28') + + # skip tests that rely on exit codes when using EnvironmentModulesTcl modules tool, + # because it doesn't use proper exit codes + if not isinstance(self.modtool, EnvironmentModulesTcl): + + # by default, exit code is checked and an error is raised if we run something that fails + error_pattern = "Module command '.*thisdoesnotmakesense' failed with exit code [1-9]" + self.assertErrorRegex(EasyBuildError, error_pattern, self.modtool.run_module, 'thisdoesnotmakesense') + + # we need to use a different error pattern here with EnvironmentModulesC, + # because a load of a non-existing module doesn't trigger a non-zero exit code...
+ it will still fail though, just differently + if isinstance(self.modtool, EnvironmentModulesC): + error_pattern = "Unable to locate a modulefile for 'nosuchmodule/1.2.3'" + else: + error_pattern = "Module command '.*load nosuchmodule/1.2.3' failed with exit code [1-9]" + self.assertErrorRegex(EasyBuildError, error_pattern, self.modtool.run_module, 'load', 'nosuchmodule/1.2.3') + + # we can choose to blatantly ignore the exit code, + # and also disable the output check that serves as a fallback; + # we also enable return_output here, because trying to apply the environment changes produced + # by a faulty command is bound to cause trouble... + kwargs = { + 'check_exit_code': False, + 'check_output': False, + 'return_output': True, + } + self.modtool.run_module('thisdoesnotmakesense', **kwargs) + self.modtool.run_module('load', 'nosuchmodule/1.2.3', **kwargs) + + # by default, the output (stdout+stderr) produced by the command is processed; + # result is a list of useful info (module names in case of list/avail) + res = self.modtool.run_module('list') + self.assertEqual(res, [{'mod_name': 'GCC/6.4.0-2.28', 'default': None}]) + + res = self.modtool.run_module('avail', 'GCC/4.6.3') + self.assertTrue(isinstance(res, list)) + self.assertEqual(sorted([x['mod_name'] for x in res]), ['GCC/4.6.3']) + + # loading a module produces no output, so we get an empty list + res = self.modtool.run_module('load', 'OpenMPI/2.1.2-GCC-6.4.0-2.28') + self.assertEqual(res, []) + self.assertEqual(os.environ['EBROOTOPENMPI'], '/prefix/software/OpenMPI/2.1.2-GCC-6.4.0-2.28') + + # we can opt into getting back the raw output (stdout + stderr); + # in that case, the output includes Python statements to change the environment; + # the changes that would be made by the module command are *not* applied to the environment + out = self.modtool.run_module('load', 'OpenBLAS/0.2.20-GCC-6.4.0-2.28', return_output=True) + patterns = [ + r"^os.environ\[.EBROOTOPENBLAS.\]\s*=\s*./prefix/software/OpenBLAS/0.2.20-GCC-6.4.0-2.28.", + r"^os.environ\[.LOADEDMODULES.\]\s*=.*OpenBLAS/0.2.20-GCC-6.4.0-2.28", + ] + for pattern in patterns: + regex = re.compile(pattern, re.M) + self.assertTrue(regex.search(out), "Pattern '%s' should be found in: %s" % (regex.pattern, out)) + + # OpenBLAS module did *not* get loaded + self.assertFalse('EBROOTOPENBLAS' in os.environ) + res = self.modtool.list() + expected = ['GCC/6.4.0-2.28', 'OpenMPI/2.1.2-GCC-6.4.0-2.28', 'hwloc/1.11.8-GCC-6.4.0-2.28'] + self.assertEqual(sorted([x['mod_name'] for x in res]), expected) + + # we can also only obtain the stderr output (which contains the user-facing output), + # and just drop the stdout output (which contains the statements to change the environment) + out = self.modtool.run_module('show', 'OpenBLAS/0.2.20-GCC-6.4.0-2.28', return_stderr=True) + patterns = [ + r"test/framework/modules/OpenBLAS/0.2.20-GCC-6.4.0-2.28:\s*$", + r"setenv\W+EBROOTOPENBLAS.+/prefix/software/OpenBLAS/0.2.20-GCC-6.4.0-2.28", + r"prepend[_-]path\W+LD_LIBRARY_PATH.+/prefix/software/OpenBLAS/0.2.20-GCC-6.4.0-2.28/lib", + ] + for pattern in patterns: + regex = re.compile(pattern, re.M) + self.assertTrue(regex.search(out), "Pattern '%s' should be found in: %s" % (regex.pattern, out)) + + # show method only returns user-facing output (obtained via stderr), not changes to the environment + regex = re.compile(r'^os\.environ\[', re.M) + self.assertFalse(regex.search(out), "Pattern '%s' should not be found in: %s" % (regex.pattern, out)) + def test_avail(self): """Test if getting a
(restricted) list of available modules works.""" self.init_testmods() @@ -168,6 +267,7 @@ def test_exist(self): java_mod_dir = os.path.join(self.test_prefix, 'Java') write_file(os.path.join(java_mod_dir, '1.8.0_181'), '#%Module') + write_file(os.path.join(self.test_prefix, 'toy', '42.1337'), '#%Module') if self.modtool.__class__ == EnvironmentModulesC: modulerc_tcl_txt = '\n'.join([ @@ -175,11 +275,18 @@ 'if {"Java/1.8" eq [module-info version Java/1.8]} {', ' module-version Java/1.8.0_181 1.8', '}', + 'if {"Java/site_default" eq [module-info version Java/site_default]} {', + ' module-version Java/1.8.0_181 site_default', + '}', ]) else: modulerc_tcl_txt = '\n'.join([ '#%Module', 'module-version Java/1.8.0_181 1.8', + 'module-version Java/1.8.0_181 site_default', + 'module-alias Java/Alias toy/42.1337', + # 'module-alias Java/NonExist non_existant/1', # (only) LMod has this in module avail, disable for now + 'module-alias JavaAlias Java/1.8.0_181', # LMod 7+ only ]) write_file(os.path.join(java_mod_dir, '.modulerc'), modulerc_tcl_txt) @@ -188,8 +295,20 @@ self.assertTrue('Java/1.8.0_181' in avail_mods) if isinstance(self.modtool, Lmod) and StrictVersion(self.modtool.version) >= StrictVersion('7.0'): self.assertTrue('Java/1.8' in avail_mods) + self.assertTrue('Java/site_default' in avail_mods) + self.assertTrue('JavaAlias' in avail_mods) + self.assertEqual(self.modtool.exist(['JavaAlias']), [True]) + self.assertEqual(self.modtool.exist(['Java/1.8', 'Java/1.8.0_181']), [True, True]) - self.assertEqual(self.modtool.module_wrapper_exists('Java/1.8'), 'Java/1.8.0_181') + + # module-version with a different version suffix than the base module + self.assertEqual(self.modtool.exist(['Java/site_default']), [True]) + # Check for aliases: + # - completely different name + # - alias to non-existent module + # Skipped for EnvironmentModulesC, as module-alias does not work correctly there + if self.modtool.__class__ != EnvironmentModulesC: + self.assertEqual(self.modtool.exist(['Java/Alias', 'Java/NonExist']), [True, False]) reset_module_caches() @@ -199,8 +318,12 @@ self.assertTrue('Core/Java/1.8.0_181' in self.modtool.available()) self.assertEqual(self.modtool.exist(['Core/Java/1.8.0_181']), [True]) - self.assertEqual(self.modtool.exist(['Core/Java/1.8']), [True]) - self.assertEqual(self.modtool.module_wrapper_exists('Core/Java/1.8'), 'Core/Java/1.8.0_181') + # module-version only works for EnvironmentModules(C) as LMod and EnvironmentModulesTcl would need updating + # to full path, see https://github.com/TACC/Lmod/issues/446 + if isinstance(self.modtool, Lmod) or self.modtool.__class__ == EnvironmentModulesTcl: + self.assertEqual(self.modtool.exist(['Core/Java/1.8', 'Core/Java/site_default']), [False, False]) + else: + self.assertEqual(self.modtool.exist(['Core/Java/1.8', 'Core/Java/site_default']), [True, True]) # also check with .modulerc.lua for Lmod 7.8 or newer if isinstance(self.modtool, Lmod) and StrictVersion(self.modtool.version) >= StrictVersion('7.8'): @@ -208,13 +331,22 @@ reset_module_caches() remove_file(os.path.join(java_mod_dir, '.modulerc')) - write_file(os.path.join(java_mod_dir, '.modulerc.lua'), 'module_version("Java/1.8.0_181", "1.8")') + write_file(os.path.join(java_mod_dir, '.modulerc.lua'), + '\n'.join([ + 'module_version("Java/1.8.0_181", "1.8")', + 'module_version("Java/1.8.0_181", "site_default")', + 'module_alias("JavaAlias", "Java/1.8")', + ])) avail_mods =
self.modtool.available() self.assertTrue('Java/1.8.0_181' in avail_mods) self.assertTrue('Java/1.8' in avail_mods) self.assertEqual(self.modtool.exist(['Java/1.8', 'Java/1.8.0_181']), [True, True]) - self.assertEqual(self.modtool.module_wrapper_exists('Java/1.8'), 'Java/1.8.0_181') + + # check for an alias with a different version suffix than the base module + self.assertEqual(self.modtool.exist(['Java/site_default']), [True]) + # and an alias with a completely different name + self.assertEqual(self.modtool.exist(['JavaAlias']), [True]) reset_module_caches() @@ -222,8 +354,24 @@ def test_exist(self): shutil.move(java_mod_dir, os.path.join(self.test_prefix, 'Core', 'Java')) self.assertTrue('Core/Java/1.8.0_181' in self.modtool.available()) self.assertEqual(self.modtool.exist(['Core/Java/1.8.0_181']), [True]) - self.assertEqual(self.modtool.exist(['Core/Java/1.8']), [True]) - self.assertEqual(self.modtool.module_wrapper_exists('Core/Java/1.8'), 'Core/Java/1.8.0_181') + self.assertEqual(self.modtool.exist(['Core/Java/1.8']), [False]) + self.assertEqual(self.modtool.exist(['Core/Java/site_default']), [False]) + + # Test alias in home directory .modulerc + if isinstance(self.modtool, Lmod) and StrictVersion(self.modtool.version) >= StrictVersion('7.0'): + # Required, or the temporary HOME would already be in MODULEPATH + self.init_testmods() + # Sanity check: Module aliases don't exist yet + self.assertEqual(self.modtool.exist(['OpenMPI/99', 'OpenMPIAlias']), [False, False]) + # Use a temporary dir, not the user's HOME + os.environ['HOME'] = tempfile.mkdtemp() + reset_module_caches() + write_file(os.path.join(os.environ['HOME'], '.modulerc'), '\n'.join([ + '#%Module', + 'module-version OpenMPI/2.1.2-GCC-6.4.0-2.28 99', + 'module-alias OpenMPIAlias OpenMPI/2.1.2-GCC-6.4.0-2.28', + ])) + self.assertEqual(self.modtool.exist(['OpenMPI/99', 'OpenMPIAlias']), [True, True]) def test_load(self): """ test if we load one module it is in the loaded_modules """ @@ -284,6 +432,25 @@ def test_load(self): self.assertEqual(os.environ.get('EBROOTGCC'), None) self.assertFalse(loaded_modules[-1] == 'GCC/6.4.0-2.28') + def test_show(self): + """Test for ModulesTool.show method.""" + + out = self.modtool.show('GCC/7.3.0-2.30') + + patterns = [ + # full path to module is included in output of 'show' + r"test/framework/modules/GCC/7.3.0-2.30:\s*$", + r"setenv\W+EBROOTGCC.+prefix/software/GCC/7.3.0-2.30", + r"^prepend[_-]path\W+PATH.+/prefix/software/GCC/7.3.0-2.30/bin", + ] + for pattern in patterns: + regex = re.compile(pattern, re.M) + self.assertTrue(regex.search(out), "Pattern '%s' should be found in: %s" % (regex.pattern, out)) + + # show method only returns user-facing output (obtained via stderr), not changes to the environment + regex = re.compile(r'^os\.environ\[', re.M) + self.assertFalse(regex.search(out), "Pattern '%s' should not be found in: %s" % (regex.pattern, out)) + def test_curr_module_paths(self): """Test for curr_module_paths function.""" @@ -544,24 +711,6 @@ def test_modulefile_path(self): res = modtool.modulefile_path('bzip2/.1.0.6', strip_ext=True) self.assertTrue(res.endswith('test/framework/modules/bzip2/.1.0.6')) - # hack into 'module show GCC/6.4.0-2.28' cache and inject alternate output that modulecmd.tcl sometimes produces - # make sure we only extract the module file path, nothing else... - # cfr.
https://github.com/easybuilders/easybuild/issues/368 - modulepath = os.environ['MODULEPATH'].split(':') - mod_show_cache_key = modtool.mk_module_cache_key('GCC/6.4.0-2.28') - mod.MODULE_SHOW_CACHE[mod_show_cache_key] = '\n'.join([ - "import os", - "os.environ['MODULEPATH_modshare'] = '%s'" % ':'.join(m + ':1' for m in modulepath), - "os.environ['MODULEPATH'] = '%s'" % ':'.join(modulepath), - "------------------------------------------------------------------------------", - "%s:" % gcc_mod_file, - "------------------------------------------------------------------------------", - # remainder of output doesn't really matter in this context - "setenv EBROOTGCC /prefix/GCC/6.4.0-2.28" - ]) - res = modtool.modulefile_path('GCC/6.4.0-2.28') - self.assertTrue(os.path.samefile(res, os.path.join(test_dir, 'modules', 'GCC', '6.4.0-2.28'))) - reset_module_caches() def test_path_to_top_of_module_tree(self): @@ -891,7 +1040,7 @@ def test_modules_tool_stateless(self): # exact error message depends on Lmod version load_err_msg = '|'.join([ r'These[\s\sn]*module\(s\)[\s\sn]*exist[\s\sn]*but[\s\sn]*cannot[\s\sn]*be', - 'The[\s\sn]*following[\s\sn]*module\(s\)[\s\sn]*are[\s\sn]*unknown', + r'The[\s\sn]*following[\s\sn]*module\(s\)[\s\sn]*are[\s\sn]*unknown', ]) else: load_err_msg = "Unable to locate a modulefile" @@ -1060,7 +1209,7 @@ def test_load_in_hierarchy(self): def test_exit_code_check(self): """Verify that EasyBuild checks exit code of executed module commands""" if isinstance(self.modtool, Lmod): - error_pattern = "Module command 'module load nosuchmoduleavailableanywhere' failed with exit code" + error_pattern = "Module command '.*load nosuchmoduleavailableanywhere' failed with exit code" else: # Tcl implementations exit with 0 even when a non-existing module is loaded... 
error_pattern = "Unable to locate a modulefile for 'nosuchmoduleavailableanywhere'" @@ -1101,7 +1250,7 @@ def check_loaded_modules(): r"^\* GCC/6.4.0-2.28", r"^\* hwloc/1.11.8-GCC-6.4.0-2.28", r"^\* OpenMPI/2.1.2-GCC-6.4.0-2.28", - "This is not recommended since it may affect the installation procedure\(s\) performed by EasyBuild.", + r"This is not recommended since it may affect the installation procedure\(s\) performed by EasyBuild.", "To make EasyBuild allow particular loaded modules, use the --allow-loaded-modules configuration option.", "To specify action to take when loaded modules are detected, use " "--detect-loaded-modules={error,ignore,purge,unload,warn}", @@ -1119,7 +1268,7 @@ def check_loaded_modules(): # error mentioning 1 non-allowed module (OpenMPI), both GCC and hwloc loaded modules are allowed error_pattern = r"Found one or more non-allowed loaded .* module.*\n" - error_pattern += "\* OpenMPI/2.1.2-GCC-6.4.0-2.28\n\nThis is not" + error_pattern += r"\* OpenMPI/2.1.2-GCC-6.4.0-2.28\n\nThis is not" self.assertErrorRegex(EasyBuildError, error_pattern, self.modtool.check_loaded_modules) # check for warning message when purge is being run on loaded modules diff --git a/test/framework/options.py b/test/framework/options.py index a755b7d7c4..938c2d8182 100644 --- a/test/framework/options.py +++ b/test/framework/options.py @@ -2750,7 +2750,7 @@ def test_xxx_include_easyblocks_from_pr(self): write_file(self.logfile, '') args = [ - '--from-pr=9979', # PR for CMake easyconfig + '--from-pr=10487', # PR for CMake easyconfig '--include-easyblocks-from-pr=1936', # PR for EB_CMake easyblock '--unittest-file=%s' % self.logfile, '--github-user=%s' % GITHUB_TEST_ACCOUNT, @@ -2760,8 +2760,8 @@ def test_xxx_include_easyblocks_from_pr(self): logtxt = read_file(self.logfile) # easyconfig from pr is found - ec_pattern = os.path.join(self.test_prefix, '.*', 'files_pr9979', 'c', 'CMake', - 'CMake-3.16.4-GCCcore-9.2.0.eb') + ec_pattern = os.path.join(self.test_prefix, '.*', 'files_pr10487', 'c', 'CMake', + 'CMake-3.16.4-GCCcore-9.3.0.eb') ec_regex = re.compile(r"Parsing easyconfig file %s" % ec_pattern, re.M) self.assertTrue(ec_regex.search(logtxt), "Pattern '%s' found in: %s" % (ec_regex.pattern, logtxt)) @@ -3308,6 +3308,7 @@ def test_new_pr_from_branch(self): '--new-pr-from-branch=%s' % test_branch, '--github-user=%s' % GITHUB_TEST_ACCOUNT, # used to get GitHub token '--github-org=boegel', # used to determine account to grab branch from + '--pr-descr="an easyconfig for toy"', '-D', ] txt, _ = self._run_mock_eb(args, do_build=True, raise_error=True, testing=False) @@ -3326,6 +3327,7 @@ def test_new_pr_from_branch(self): r"\* target: easybuilders/easybuild-easyconfigs:develop$", r"^\* from: boegel/easybuild-easyconfigs:test_new_pr_from_branch_DO_NOT_REMOVE$", r'^\* title: "\{tools\}\[system/system\] toy v0\.0"$', + r'^"an easyconfig for toy"$', r"^ 1 file changed, 32 insertions\(\+\)$", r"^\* overview of changes:\n easybuild/easyconfigs/t/toy/toy-0\.0\.eb | 32", ] @@ -4263,54 +4265,128 @@ def test_list_prs(self): def test_list_software(self): """Test --list-software and --list-installed-software.""" - test_ecs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'v1.0') - args = [ - '--list-software', - '--robot-paths=%s' % test_ecs, - ] - txt, _ = self._run_mock_eb(args, do_build=True, raise_error=True, testing=False) - expected = '\n'.join([ - "== Processed 5/5 easyconfigs...", - "== Found 2 different software packages", - '', - "* GCC", - "* gzip", - '', + + # copy selected 
test easyconfigs for testing --list-*software options with; + # full test is a nuisance, because all dependencies must be available and toolchains like intel must have + # all expected components when testing with HierarchicalMNS (which the test easyconfigs don't always have) + topdir = os.path.dirname(os.path.abspath(__file__)) + + cray_ec = os.path.join(topdir, 'easyconfigs', 'test_ecs', 'c', 'CrayCCE', 'CrayCCE-5.1.29.eb') + gcc_ec = os.path.join(topdir, 'easyconfigs', 'test_ecs', 'g', 'GCC', 'GCC-4.6.3.eb') + gzip_ec = os.path.join(topdir, 'easyconfigs', 'v1.0', 'g', 'gzip', 'gzip-1.4-GCC-4.6.3.eb') + gzip_system_ec = os.path.join(topdir, 'easyconfigs', 'v1.0', 'g', 'gzip', 'gzip-1.4.eb') + + test_ecs = os.path.join(self.test_prefix, 'test_ecs') + for ec in [cray_ec, gcc_ec, gzip_ec, gzip_system_ec]: + subdirs = os.path.dirname(ec).split(os.path.sep)[-2:] + target_dir = os.path.join(test_ecs, *subdirs) + mkdir(target_dir, parents=True) + copy_file(ec, target_dir) + + # add (fake) HPL easyconfig using CrayCCE toolchain + # (required to trigger bug reported in https://github.com/easybuilders/easybuild-framework/issues/3265) + hpl_cray_ec_txt = '\n'.join([ + 'easyblock = "ConfigureMake"', + 'name = "HPL"', + 'version = "2.3"', + "homepage = 'http://www.netlib.org/benchmark/hpl/'", + 'description = "HPL"', + 'toolchain = {"name": "CrayCCE", "version": "5.1.29"}', ]) - self.assertTrue(txt.endswith(expected)) + hpl_cray_ec = os.path.join(self.test_prefix, 'test_ecs', 'h', 'HPL', 'HPL-2.3-CrayCCE-5.1.29.eb') + write_file(hpl_cray_ec, hpl_cray_ec_txt) - args = [ - '--list-software=detailed', - '--output-format=rst', - '--robot-paths=%s' % test_ecs, - ] - txt, _ = self._run_mock_eb(args, testing=False) - self.assertTrue(re.search(r'^\*GCC\*', txt, re.M)) - self.assertTrue(re.search(r'^``4.6.3``\s+``system``', txt, re.M)) - self.assertTrue(re.search(r'^\*gzip\*', txt, re.M)) - self.assertTrue(re.search(r'^``1.5``\s+``foss/2018a``,\s+``intel/2018a``', txt, re.M)) + # put dummy Core/GCC/4.6.3 in place + modpath = os.path.join(self.test_prefix, 'modules') + write_file(os.path.join(modpath, 'Core', 'GCC', '4.6.3'), '#%Module') + self.modtool.use(modpath) - args = [ - '--list-installed-software', - '--output-format=rst', - '--robot-paths=%s' % test_ecs, - ] - txt, _ = self._run_mock_eb(args, testing=False, raise_error=True) - self.assertTrue(re.search(r'== Processed 5/5 easyconfigs...', txt, re.M)) - self.assertTrue(re.search(r'== Found 2 different software packages', txt, re.M)) - self.assertTrue(re.search(r'== Retained 1 installed software packages', txt, re.M)) - self.assertTrue(re.search(r'^\* GCC', txt, re.M)) - self.assertFalse(re.search(r'gzip', txt, re.M)) + # test with different module naming scheme active + # (see https://github.com/easybuilders/easybuild-framework/issues/3265) + for mns in ['EasyBuildMNS', 'HierarchicalMNS']: - args = [ - '--list-installed-software=detailed', - '--robot-paths=%s' % test_ecs, - ] - txt, _ = self._run_mock_eb(args, testing=False) - self.assertTrue(re.search(r'^== Retained 1 installed software packages', txt, re.M)) - self.assertTrue(re.search(r'^\* GCC', txt, re.M)) - self.assertTrue(re.search(r'^\s+\* GCC v4.6.3: system', txt, re.M)) - self.assertFalse(re.search(r'gzip', txt, re.M)) + args = [ + '--list-software', + '--robot-paths=%s' % test_ecs, + '--module-naming-scheme=%s' % mns, + ] + txt, _ = self._run_mock_eb(args, do_build=True, raise_error=True, testing=False, verbose=True) + + patterns = [ + r"^.*\s*== Processed 
5/5 easyconfigs...", + r"^== Found 4 different software packages", + r"^\* CrayCCE", + r"^\* GCC", + r"^\* gzip", + r"^\* HPL", + ] + for pattern in patterns: + regex = re.compile(pattern, re.M) + self.assertTrue(regex.search(txt), "Pattern '%s' found in: %s" % (regex.pattern, txt)) + + args = [ + '--list-software=detailed', + '--output-format=rst', + '--robot-paths=%s' % test_ecs, + '--module-naming-scheme=%s' % mns, + ] + txt, _ = self._run_mock_eb(args, testing=False, raise_error=True, verbose=True) + + patterns = [ + r"^.*\s*== Processed 5/5 easyconfigs...", + r"^== Found 4 different software packages", + r'^\*CrayCCE\*', + r'^``5.1.29``\s+``system``', + r'^\*GCC\*', + r'^``4.6.3``\s+``system``', + r'^\*gzip\*', + r'^``1.4`` ``GCC/4.6.3``, ``system``', + ] + for pattern in patterns: + regex = re.compile(pattern, re.M) + self.assertTrue(regex.search(txt), "Pattern '%s' found in: %s" % (regex.pattern, txt)) + + args = [ + '--list-installed-software', + '--output-format=rst', + '--robot-paths=%s' % test_ecs, + '--module-naming-scheme=%s' % mns, + ] + txt, _ = self._run_mock_eb(args, testing=False, raise_error=True, verbose=True) + + patterns = [ + r"^.*\s*== Processed 5/5 easyconfigs...", + r"^== Found 4 different software packages", + r"^== Retained 1 installed software packages", + r'^\* GCC', + ] + for pattern in patterns: + regex = re.compile(pattern, re.M) + self.assertTrue(regex.search(txt), "Pattern '%s' found in: %s" % (regex.pattern, txt)) + + self.assertFalse(re.search(r'gzip', txt, re.M)) + self.assertFalse(re.search(r'CrayCCE', txt, re.M)) + + args = [ + '--list-installed-software=detailed', + '--robot-paths=%s' % test_ecs, + '--module-naming-scheme=%s' % mns, + ] + txt, _ = self._run_mock_eb(args, testing=False, raise_error=True, verbose=True) + + patterns = [ + r"^.*\s*== Processed 5/5 easyconfigs...", + r"^== Found 4 different software packages", + r"^== Retained 1 installed software packages", + r'^\* GCC', + r'^\s+\* GCC v4.6.3: system', + ] + for pattern in patterns: + regex = re.compile(pattern, re.M) + self.assertTrue(regex.search(txt), "Pattern '%s' found in: %s" % (regex.pattern, txt)) + + self.assertFalse(re.search(r'gzip', txt, re.M)) + self.assertFalse(re.search(r'CrayCCE', txt, re.M)) def test_parse_optarch(self): """Test correct parsing of optarch option.""" diff --git a/test/framework/run.py b/test/framework/run.py index e7d608c7b2..5150838d80 100644 --- a/test/framework/run.py +++ b/test/framework/run.py @@ -206,22 +206,27 @@ def test_run_cmd_negative_exit_code(self): def handler(signum, _): raise RuntimeError("Signal handler called with signal %s" % signum) - # set the signal handler and a 3-second alarm - signal.signal(signal.SIGALRM, handler) - signal.alarm(3) + orig_sigalrm_handler = signal.getsignal(signal.SIGALRM) - (_, ec) = run_cmd("kill -9 $$", log_ok=False) - self.assertEqual(ec, -9) + try: + # set the signal handler and a 3-second alarm + signal.signal(signal.SIGALRM, handler) + signal.alarm(3) - # reset the alarm - signal.alarm(0) - signal.alarm(3) + (_, ec) = run_cmd("kill -9 $$", log_ok=False) + self.assertEqual(ec, -9) - (_, ec) = run_cmd_qa("kill -9 $$", {}, log_ok=False) - self.assertEqual(ec, -9) + # reset the alarm + signal.alarm(0) + signal.alarm(3) - # disable the alarm - signal.alarm(0) + (_, ec) = run_cmd_qa("kill -9 $$", {}, log_ok=False) + self.assertEqual(ec, -9) + + finally: + # cleanup: disable the alarm + reset signal handler for SIGALRM + signal.signal(signal.SIGALRM, orig_sigalrm_handler) + signal.alarm(0) def 
test_run_cmd_bis(self): """More 'complex' test for run_cmd function.""" diff --git a/test/framework/sandbox/easybuild/easyblocks/t/toy.py b/test/framework/sandbox/easybuild/easyblocks/t/toy.py index 79f3d3d8fc..11b2dff542 100644 --- a/test/framework/sandbox/easybuild/easyblocks/t/toy.py +++ b/test/framework/sandbox/easybuild/easyblocks/t/toy.py @@ -46,7 +46,7 @@ class EB_toy(ExtensionEasyBlock): @staticmethod def extra_options(extra_vars=None): - """Custom easyconfig parameters for toytoy.""" + """Custom easyconfig parameters for toy.""" if extra_vars is None: extra_vars = {} diff --git a/test/framework/toolchain.py b/test/framework/toolchain.py index 2b0fc84634..4ed54ead66 100644 --- a/test/framework/toolchain.py +++ b/test/framework/toolchain.py @@ -40,6 +40,7 @@ from test.framework.utilities import EnhancedTestCase, TestLoaderFiltered, find_full_path, init_config import easybuild.tools.modules as modules +import easybuild.tools.toolchain as toolchain import easybuild.tools.toolchain.compiler from easybuild.framework.easyconfig.easyconfig import EasyConfig, ActiveMNS from easybuild.toolchains.system import SystemToolchain @@ -49,6 +50,7 @@ from easybuild.tools.filetools import adjust_permissions, copy_dir, find_eb_script, mkdir, read_file, write_file, which from easybuild.tools.py2vs3 import string_type from easybuild.tools.run import run_cmd +from easybuild.tools.toolchain.mpi import get_mpi_cmd_template from easybuild.tools.toolchain.toolchain import env_vars_external_module from easybuild.tools.toolchain.utilities import get_toolchain, search_toolchain @@ -1027,6 +1029,40 @@ def test_mpi_cmd_for(self): error_pattern = "Failed to complete MPI cmd template .* with .*: KeyError 'foo'" self.assertErrorRegex(EasyBuildError, error_pattern, tc.mpi_cmd_for, 'test', 1) + def test_get_mpi_cmd_template(self): + """Test get_mpi_cmd_template function.""" + + # search_toolchain needs to be called once to make sure constants like toolchain.OPENMPI are in place + search_toolchain('') + + input_params = {'nr_ranks': 123, 'cmd': 'this_is_just_a_test'} + + for mpi_fam in [toolchain.OPENMPI, toolchain.MPICH, toolchain.MPICH2, toolchain.MVAPICH2]: + mpi_cmd_tmpl, params = get_mpi_cmd_template(mpi_fam, input_params) + self.assertEqual(mpi_cmd_tmpl, "mpirun -n %(nr_ranks)s %(cmd)s") + self.assertEqual(params, input_params) + + # Intel MPI is a special case, also requires MPI version to be known + impi = toolchain.INTELMPI + error_pattern = "Intel MPI version unknown, can't determine MPI command template!" 
+ self.assertErrorRegex(EasyBuildError, error_pattern, get_mpi_cmd_template, impi, {}) + + mpi_cmd_tmpl, params = get_mpi_cmd_template(toolchain.INTELMPI, input_params, mpi_version='1.0') + self.assertEqual(mpi_cmd_tmpl, "mpirun %(mpdbf)s %(nodesfile)s -np %(nr_ranks)s %(cmd)s") + self.assertEqual(sorted(params.keys()), ['cmd', 'mpdbf', 'nodesfile', 'nr_ranks']) + self.assertEqual(params['cmd'], 'this_is_just_a_test') + self.assertEqual(params['nr_ranks'], 123) + + mpdbf = params['mpdbf'] + regex = re.compile('^--file=.*/mpdboot$') + self.assertTrue(regex.match(mpdbf), "'%s' should match pattern '%s'" % (mpdbf, regex.pattern)) + self.assertTrue(os.path.exists(mpdbf.split('=')[1])) + + nodesfile = params['nodesfile'] + regex = re.compile('^-machinefile /.*/nodes$') + self.assertTrue(regex.match(nodesfile), "'%s' should match pattern '%s'" % (nodesfile, regex.pattern)) + self.assertTrue(os.path.exists(nodesfile.split(' ')[1])) + def test_prepare_deps(self): """Test preparing for a toolchain when dependencies are involved.""" tc = self.get_toolchain('GCC', version='6.4.0-2.28') diff --git a/test/framework/toy_build.py b/test/framework/toy_build.py index 1570504205..5f519fe31b 100644 --- a/test/framework/toy_build.py +++ b/test/framework/toy_build.py @@ -1,4 +1,5 @@ -# # +# -*- coding: utf-8 -*- +## # Copyright 2013-2020 Ghent University # # This file is part of EasyBuild, @@ -50,10 +51,11 @@ from easybuild.tools.build_log import EasyBuildError from easybuild.tools.config import get_module_syntax, get_repositorypath from easybuild.tools.environment import modify_env -from easybuild.tools.filetools import adjust_permissions, mkdir, read_file, remove_dir, remove_file, which, write_file +from easybuild.tools.filetools import adjust_permissions, change_dir, mkdir, read_file, remove_dir, remove_file +from easybuild.tools.filetools import which, write_file from easybuild.tools.module_generator import ModuleGeneratorTcl from easybuild.tools.modules import Lmod -from easybuild.tools.py2vs3 import string_type +from easybuild.tools.py2vs3 import reload, string_type from easybuild.tools.run import run_cmd from easybuild.tools.version import VERSION as EASYBUILD_VERSION @@ -73,7 +75,31 @@ def setUp(self): def tearDown(self): """Cleanup.""" + + # kick out any paths for included easyblocks from sys.path, + # to avoid infecting any other tests + for path in sys.path[:]: + if '/included-easyblocks' in path: + sys.path.remove(path) + + # reload toy easyblock (and generic toy_extension easyblock that imports it) after cleaning up sys.path, + # to avoid trouble in other tests due to an included toy easyblock that is cached somewhere + # (despite the cleanup in sys.modules); + # important for tests that include a customised copy of the toy easyblock + # (like test_toy_build_enhanced_sanity_check) + import easybuild.easyblocks.toy + reload(easybuild.easyblocks.toy) + import easybuild.easyblocks.toytoy + reload(easybuild.easyblocks.toytoy) + import easybuild.easyblocks.generic.toy_extension + reload(easybuild.easyblocks.generic.toy_extension) + + del sys.modules['easybuild.easyblocks.toy'] + del sys.modules['easybuild.easyblocks.toytoy'] + del sys.modules['easybuild.easyblocks.generic.toy_extension'] + super(ToyBuildTest, self).tearDown() + # remove logs if os.path.exists(self.dummylogfn): os.remove(self.dummylogfn) @@ -117,7 +143,8 @@ def check_toy(self, installpath, outtxt, version='0.0', versionprefix='', versio self.assertTrue(os.path.exists(devel_module_path)) def test_toy_build(self, extra_args=None,
ec_file=None, tmpdir=None, verify=True, fails=False, verbose=True, - raise_error=False, test_report=None, versionsuffix='', testing=True): + raise_error=False, test_report=None, versionsuffix='', testing=True, + raise_systemexit=False): """Perform a toy build.""" if extra_args is None: extra_args = [] @@ -144,7 +171,7 @@ def test_toy_build(self, extra_args=None, ec_file=None, tmpdir=None, verify=True myerr = None try: outtxt = self.eb_main(args, logfile=self.dummylogfn, do_build=True, verbose=verbose, - raise_error=raise_error, testing=testing) + raise_error=raise_error, testing=testing, raise_systemexit=raise_systemexit) except Exception as err: myerr = err if raise_error: @@ -1337,7 +1364,7 @@ def test_external_dependencies(self): write_file(toy_ec, ectxt + extraectxt) if isinstance(self.modtool, Lmod): - err_msg = r"Module command \\'module load nosuchbuilddep/0.0.0\\' failed" + err_msg = r"Module command \\'.*load nosuchbuilddep/0.0.0\\' failed" else: err_msg = r"Unable to locate a modulefile for 'nosuchbuilddep/0.0.0'" @@ -1349,7 +1376,7 @@ def test_external_dependencies(self): write_file(toy_ec, ectxt + extraectxt) if isinstance(self.modtool, Lmod): - err_msg = r"Module command \\'module load nosuchmodule/1.2.3\\' failed" + err_msg = r"Module command \\'.*load nosuchmodule/1.2.3\\' failed" else: err_msg = r"Unable to locate a modulefile for 'nosuchmodule/1.2.3'" @@ -1887,6 +1914,169 @@ def test_sanity_check_paths_lib64(self): write_file(test_ec, ectxt) self.test_toy_build(ec_file=test_ec, raise_error=True) + def test_toy_build_enhanced_sanity_check(self): + """Test enhancing of sanity check.""" + + # if toy easyblock was imported, get rid of corresponding entry in sys.modules, + # to avoid it messing up the use of --include-easyblocks=toy.py below...
+ if 'easybuild.easyblocks.toy' in sys.modules: + del sys.modules['easybuild.easyblocks.toy'] + + test_dir = os.path.join(os.path.abspath(os.path.dirname(__file__))) + toy_ec = os.path.join(test_dir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb') + toy_ec_txt = read_file(toy_ec) + + test_ec = os.path.join(self.test_prefix, 'test.eb') + + # get rid of custom sanity check paths in test easyconfig + regex = re.compile(r'^sanity_check_paths\s*=\s*{[^}]+}', re.M) + test_ec_txt = regex.sub('', toy_ec_txt) + write_file(test_ec, test_ec_txt) + + self.assertFalse('sanity_check_' in test_ec_txt) + + # create custom easyblock for toy that has a custom sanity_check_step + toy_easyblock = os.path.join(test_dir, 'sandbox', 'easybuild', 'easyblocks', 't', 'toy.py') + + toy_easyblock_txt = read_file(toy_easyblock) + + toy_custom_sanity_check_step = '\n'.join([ + '', + " def sanity_check_step(self):", + " paths = {", + " 'files': ['bin/toy'],", + " 'dirs': [],", + " }", + " cmds = ['toy']", + " return super(EB_toy, self).sanity_check_step(custom_paths=paths, custom_commands=cmds)", + ]) + test_toy_easyblock = os.path.join(self.test_prefix, 'toy.py') + write_file(test_toy_easyblock, toy_easyblock_txt + toy_custom_sanity_check_step) + + eb_args = [ + '--extended-dry-run', + '--include-easyblocks=%s' % test_toy_easyblock, + ] + + # by default, sanity check commands & paths specified by easyblock are used + self.mock_stdout(True) + self.test_toy_build(ec_file=test_ec, extra_args=eb_args, verify=False, testing=False, raise_error=True) + stdout = self.get_stdout() + self.mock_stdout(False) + + pattern_lines = [ + r"Sanity check paths - file.*", + r"\s*\* bin/toy", + r"Sanity check paths - \(non-empty\) directory.*", + r"\s*\(none\)", + r"Sanity check commands", + r"\s*\* toy", + r'', + ] + regex = re.compile(r'\n'.join(pattern_lines), re.M) + self.assertTrue(regex.search(stdout), "Pattern '%s' should be found in: %s" % (regex.pattern, stdout)) + + # we need to manually wipe the entry for the included toy easyblock, + # to avoid trouble with subsequent EasyBuild sessions in this test + del sys.modules['easybuild.easyblocks.toy'] + + # easyconfig specifies custom sanity_check_paths & sanity_check_commands, + # the ones defined by the easyblock are skipped by default + test_ec_txt = test_ec_txt + '\n'.join([ + '', + "sanity_check_paths = {", + " 'files': ['README'],", + " 'dirs': ['bin/']", + "}", + "sanity_check_commands = ['ls %(installdir)s']", + ]) + write_file(test_ec, test_ec_txt) + + self.mock_stdout(True) + self.test_toy_build(ec_file=test_ec, extra_args=eb_args, verify=False, testing=False, raise_error=True) + stdout = self.get_stdout() + self.mock_stdout(False) + + pattern_lines = [ + r"Sanity check paths - file.*", + r"\s*\* README", + r"Sanity check paths - \(non-empty\) directory.*", + r"\s*\* bin/", + r"Sanity check commands", + r"\s*\* ls .*/software/toy/0.0", + r'', + ] + regex = re.compile(r'\n'.join(pattern_lines), re.M) + self.assertTrue(regex.search(stdout), "Pattern '%s' should be found in: %s" % (regex.pattern, stdout)) + + del sys.modules['easybuild.easyblocks.toy'] + + # if enhance_sanity_check is enabled, then sanity check paths/commands specified in easyconfigs + # are used in addition to those defined in easyblock + test_ec_txt = test_ec_txt + '\nenhance_sanity_check = True' + write_file(test_ec, test_ec_txt) + + self.mock_stdout(True) + self.test_toy_build(ec_file=test_ec, extra_args=eb_args, verify=False, testing=False, raise_error=True) + stdout = self.get_stdout() + 
self.mock_stdout(False) + + # now 'bin/toy' file and 'toy' command should also be part of sanity check + pattern_lines = [ + r"Sanity check paths - file.*", + r"\s*\* README", + r"\s*\* bin/toy", + r"Sanity check paths - \(non-empty\) directory.*", + r"\s*\* bin/", + r"Sanity check commands", + r"\s*\* ls .*/software/toy/0.0", + r"\s*\* toy", + r'', + ] + regex = re.compile(r'\n'.join(pattern_lines), re.M) + self.assertTrue(regex.search(stdout), "Pattern '%s' should be found in: %s" % (regex.pattern, stdout)) + + del sys.modules['easybuild.easyblocks.toy'] + + # sanity_check_paths with only one key is allowed if enhance_sanity_check is enabled; + test_ec_txt = test_ec_txt + "\nsanity_check_paths = {'files': ['README']}" + write_file(test_ec, test_ec_txt) + + # we need to do a non-dry run here, to ensure the code we want to test is triggered + # (EasyConfig.dump called by 'reproduce_build' function from 'build_and_install_one') + eb_args = [ + '--include-easyblocks=%s' % test_toy_easyblock, + '--trace', + ] + + self.mock_stdout(True) + self.test_toy_build(ec_file=test_ec, extra_args=eb_args, verify=False, testing=False, raise_error=True) + stdout = self.get_stdout() + self.mock_stdout(False) + + pattern_lines = [ + r"^== sanity checking\.\.\.", + r" >> file 'bin/toy' found: OK", + ] + regex = re.compile(r'\n'.join(pattern_lines), re.M) + self.assertTrue(regex.search(stdout), "Pattern '%s' should be found in: %s" % (regex.pattern, stdout)) + + # no directories are checked in sanity check now, only files (since dirs is an empty list) + regex = re.compile(r"directory .* found:", re.M) + self.assertFalse(regex.search(stdout), "Pattern '%s' should not be found in: %s" % (regex.pattern, stdout)) + + del sys.modules['easybuild.easyblocks.toy'] + + # if enhance_sanity_check is disabled, both files/dirs keys are strictly required in sanity_check_paths + test_ec_txt = test_ec_txt + '\nenhance_sanity_check = False' + write_file(test_ec, test_ec_txt) + + error_pattern = " Missing mandatory key 'dirs' in sanity_check_paths."
+ self.assertErrorRegex(EasyBuildError, error_pattern, self.test_toy_build, ec_file=test_ec, + extra_args=eb_args, raise_error=True, verbose=False) + + del sys.modules['easybuild.easyblocks.toy'] + def test_toy_dumped_easyconfig(self): """ Test dumping of file in eb_filerepo in both .eb and .yeb format """ filename = 'toy-0.0' @@ -2068,9 +2258,16 @@ def test_toy_modaltsoftname(self): def test_toy_build_trace(self): """Test use of --trace""" + + topdir = os.path.dirname(os.path.abspath(__file__)) + toy_ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb') + + test_ec = os.path.join(self.test_prefix, 'test.eb') + write_file(test_ec, read_file(toy_ec_file) + '\nsanity_check_commands = ["toy"]') + self.mock_stderr(True) self.mock_stdout(True) - self.test_toy_build(extra_args=['--trace', '--experimental'], verify=False, testing=False) + self.test_toy_build(ec_file=test_ec, extra_args=['--trace', '--experimental'], verify=False, testing=False) stderr = self.get_stderr() stdout = self.get_stdout() self.mock_stderr(False) @@ -2095,6 +2292,8 @@ def test_toy_build_trace(self): r"== sanity checking\.\.\.", r" >> file 'bin/yot' or 'bin/toy' found: OK", r" >> \(non-empty\) directory 'bin' found: OK", + r" >> running command 'toy' \.\.\.", + r" >> result for command 'toy': OK", ]) + r'$', r"^== creating module\.\.\.\n >> generating module file @ .*/modules/all/toy/0\.0(?:\.lua)?$", ] @@ -2545,8 +2744,10 @@ def test_toy_build_lock(self): # also test use of --ignore-locks self.test_toy_build(extra_args=extra_args + ['--ignore-locks'], verify=True, raise_error=True) + orig_sigalrm_handler = signal.getsignal(signal.SIGALRM) + # define a context manager that remove a lock after a while, so we can check the use of --wait-for-lock - class remove_lock_after: + class remove_lock_after(object): def __init__(self, seconds, lock_fp): self.seconds = seconds self.lock_fp = lock_fp @@ -2559,45 +2760,90 @@ def __enter__(self): signal.alarm(self.seconds) def __exit__(self, type, value, traceback): - pass + # clean up SIGALRM signal handler, and cancel scheduled alarm + signal.signal(signal.SIGALRM, orig_sigalrm_handler) + signal.alarm(0) - # wait for lock to be removed, with 1 second interval of checking - extra_args.append('--wait-on-lock=1') + # wait for lock to be removed, with 1 second interval of checking; + # check with both --wait-on-lock-interval and deprecated --wait-on-lock options wait_regex = re.compile("^== lock .*_software_toy_0.0.lock exists, waiting 1 seconds", re.M) ok_regex = re.compile("^== COMPLETED: Installation ended successfully", re.M) - self.assertTrue(os.path.exists(toy_lock_path)) + test_cases = [ + ['--wait-on-lock=1'], + ['--wait-on-lock=1', '--wait-on-lock-interval=60'], + ['--wait-on-lock=100', '--wait-on-lock-interval=1'], + ['--wait-on-lock-limit=100', '--wait-on-lock=1'], + ['--wait-on-lock-limit=100', '--wait-on-lock-interval=1'], + ['--wait-on-lock-limit=-1', '--wait-on-lock=1'], + ['--wait-on-lock-limit=-1', '--wait-on-lock-interval=1'], + ] - # use context manager to remove lock after 3 seconds - with remove_lock_after(3, toy_lock_path): - self.mock_stderr(True) - self.mock_stdout(True) - self.test_toy_build(extra_args=extra_args, verify=False, raise_error=True, testing=False) - stderr, stdout = self.get_stderr(), self.get_stdout() - self.mock_stderr(False) - self.mock_stdout(False) + for opts in test_cases: - self.assertEqual(stderr, '') + if any('--wait-on-lock=' in x for x in opts): + self.allow_deprecated_behaviour() + else: + 
self.disallow_deprecated_behaviour() - wait_matches = wait_regex.findall(stdout) - # we can't rely on an exact number of 'waiting' messages, so let's go with a range... - self.assertTrue(len(wait_matches) in range(2, 5)) + if not os.path.exists(toy_lock_path): + mkdir(toy_lock_path) - self.assertTrue(ok_regex.search(stdout), "Pattern '%s' found in: %s" % (ok_regex.pattern, stdout)) + self.assertTrue(os.path.exists(toy_lock_path)) + + all_args = extra_args + opts + + # use context manager to remove lock after 3 seconds + with remove_lock_after(3, toy_lock_path): + self.mock_stderr(True) + self.mock_stdout(True) + self.test_toy_build(extra_args=all_args, verify=False, raise_error=True, testing=False) + stderr, stdout = self.get_stderr(), self.get_stdout() + self.mock_stderr(False) + self.mock_stdout(False) - # when there is no lock in place, --wait-on-lock has no impact - self.assertFalse(os.path.exists(toy_lock_path)) + if any('--wait-on-lock=' in x for x in all_args): + self.assertTrue("Use of --wait-on-lock is deprecated" in stderr) + else: + self.assertEqual(stderr, '') + + wait_matches = wait_regex.findall(stdout) + # we can't rely on an exact number of 'waiting' messages, so let's go with a range... + self.assertTrue(len(wait_matches) in range(2, 5)) + + self.assertTrue(ok_regex.search(stdout), "Pattern '%s' found in: %s" % (ok_regex.pattern, stdout)) + + # check use of --wait-on-lock-limit: if lock is never removed, we should give up when limit is reached + mkdir(toy_lock_path) + all_args = extra_args + ['--wait-on-lock-limit=3', '--wait-on-lock-interval=1'] self.mock_stderr(True) self.mock_stdout(True) - self.test_toy_build(extra_args=extra_args, verify=False, raise_error=True, testing=False) + error_pattern = r"Maximum wait time for lock /.*toy_0.0.lock to be released reached: [0-9]+ sec >= 3 sec" + self.assertErrorRegex(EasyBuildError, error_pattern, self.test_toy_build, extra_args=all_args, + verify=False, raise_error=True, testing=False) stderr, stdout = self.get_stderr(), self.get_stdout() self.mock_stderr(False) self.mock_stdout(False) - self.assertEqual(stderr, '') - self.assertTrue(ok_regex.search(stdout), "Pattern '%s' found in: %s" % (ok_regex.pattern, stdout)) - self.assertFalse(wait_regex.search(stdout), "Pattern '%s' not found in: %s" % (wait_regex.pattern, stdout)) + wait_matches = wait_regex.findall(stdout) + self.assertTrue(len(wait_matches) in range(2, 5)) + + # when there is no lock in place, --wait-on-lock* has no impact + remove_dir(toy_lock_path) + for opt in ['--wait-on-lock=1', '--wait-on-lock-limit=3', '--wait-on-lock-interval=1']: + all_args = extra_args + [opt] + self.assertFalse(os.path.exists(toy_lock_path)) + self.mock_stderr(True) + self.mock_stdout(True) + self.test_toy_build(extra_args=all_args, verify=False, raise_error=True, testing=False) + stderr, stdout = self.get_stderr(), self.get_stdout() + self.mock_stderr(False) + self.mock_stdout(False) + + self.assertEqual(stderr, '') + self.assertTrue(ok_regex.search(stdout), "Pattern '%s' found in: %s" % (ok_regex.pattern, stdout)) + self.assertFalse(wait_regex.search(stdout), "Pattern '%s' not found in: %s" % (wait_regex.pattern, stdout)) # check for clean error on creation of lock extra_args = ['--locks-dir=/'] @@ -2606,6 +2852,91 @@ def __exit__(self, type, value, traceback): self.assertErrorRegex(EasyBuildError, error_pattern, self.test_toy_build, extra_args=extra_args, raise_error=True, verbose=False) + def test_toy_lock_cleanup_signals(self): + """Test cleanup of locks after EasyBuild session gets a 
cancellation signal.""" + + orig_wd = os.getcwd() + + locks_dir = os.path.join(self.test_installpath, 'software', '.locks') + self.assertFalse(os.path.exists(locks_dir)) + + orig_sigalrm_handler = signal.getsignal(signal.SIGALRM) + + # context manager which interrupts the running function by sending the specified signal after a delay + class wait_and_signal(object): + def __init__(self, seconds, signum): + self.seconds = seconds + self.signum = signum + + def send_signal(self, *args): + os.kill(os.getpid(), self.signum) + + def __enter__(self): + signal.signal(signal.SIGALRM, self.send_signal) + signal.alarm(self.seconds) + + def __exit__(self, type, value, traceback): + # clean up SIGALRM signal handler, and cancel scheduled alarm + signal.signal(signal.SIGALRM, orig_sigalrm_handler) + signal.alarm(0) + + # add extra sleep command to ensure session takes long enough + test_ecs_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs') + toy_ec_txt = read_file(os.path.join(test_ecs_dir, 't', 'toy', 'toy-0.0.eb')) + + test_ec = os.path.join(self.test_prefix, 'test.eb') + write_file(test_ec, toy_ec_txt + '\npostinstallcmds = ["sleep 5"]') + + signums = [ + (signal.SIGABRT, SystemExit), + (signal.SIGINT, KeyboardInterrupt), + (signal.SIGTERM, SystemExit), + (signal.SIGQUIT, SystemExit), + ] + for (signum, exc) in signums: + + # avoid recycling stderr of previous test + stderr = '' + + with wait_and_signal(1, signum): + + # change back to original working directory before each test + change_dir(orig_wd) + + self.mock_stderr(True) + self.mock_stdout(True) + self.assertErrorRegex(exc, '.*', self.test_toy_build, ec_file=test_ec, verify=False, + raise_error=True, testing=False, raise_systemexit=True) + + stderr = self.get_stderr().strip() + self.mock_stderr(False) + self.mock_stdout(False) + + pattern = r"^WARNING: signal received \(%s\), " % int(signum) + pattern += r"cleaning up locks \(.*software_toy_0.0\)\.\.\." + regex = re.compile(pattern) + self.assertTrue(regex.search(stderr), "Pattern '%s' should be found in: %s" % (regex.pattern, stderr)) + + def test_toy_build_unicode_description(self): + """Test installation of easyconfig file that has non-ASCII characters in description.""" + # cfr. https://github.com/easybuilders/easybuild-framework/issues/3284 + + test_ecs_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'easyconfigs', 'test_ecs') + toy_ec = os.path.join(test_ecs_dir, 't', 'toy', 'toy-0.0.eb') + toy_ec_txt = read_file(toy_ec) + + # the tilde character included here is a Unicode tilde character, not a regular ASCII tilde (~) + descr = "This description includes a unicode tilde character: ∼, for your entertainment."
+ self.assertFalse('~' in descr) + + regex = re.compile(r'^description\s*=.*', re.M) + test_ec_txt = regex.sub(r'description = "%s"' % descr, toy_ec_txt) + + test_ec = os.path.join(self.test_prefix, 'test.eb') + write_file(test_ec, test_ec_txt) + + self.test_toy_build(ec_file=test_ec, raise_error=True) + def suite(): """ return all the tests in this file """ diff --git a/test/framework/tweak.py b/test/framework/tweak.py index c432a86ceb..16cd6ee31e 100644 --- a/test/framework/tweak.py +++ b/test/framework/tweak.py @@ -472,8 +472,7 @@ def test_map_easyconfig_to_target_tc_hierarchy(self): update_build_specs={'version': new_version}, update_dep_versions=False) tweaked_ec = process_easyconfig(tweaked_spec)[0] - tweaked_dict = tweaked_ec['ec'].asdict() - extensions = tweaked_dict['exts_list'] + extensions = tweaked_ec['ec']['exts_list'] # check one extension with the same name exists and that the version has been updated hit_extension = 0 for extension in extensions: diff --git a/test/framework/utilities.py b/test/framework/utilities.py index 2c2bd73ffb..1d98dbcced 100644 --- a/test/framework/utilities.py +++ b/test/framework/utilities.py @@ -125,9 +125,8 @@ def setUp(self): os.environ['EASYBUILD_ROBOT_PATHS'] = os.path.join(testdir, 'easyconfigs', 'test_ecs') # make sure no deprecated behaviour is being triggered (unless intended by the test) - # trip *all* log.deprecated statements by setting deprecation version ridiculously high self.orig_current_version = eb_build_log.CURRENT_VERSION - os.environ['EASYBUILD_DEPRECATED'] = '10000000' + self.disallow_deprecated_behaviour() init_config() @@ -181,6 +180,11 @@ def setUp(self): self.reset_modulepath([os.path.join(testdir, 'modules')]) reset_module_caches() + def disallow_deprecated_behaviour(self): + """trip *all* log.deprecated statements by setting deprecation version ridiculously high""" + os.environ['EASYBUILD_DEPRECATED'] = '10000000' + eb_build_log.CURRENT_VERSION = os.environ['EASYBUILD_DEPRECATED'] + def allow_deprecated_behaviour(self): """Restore EasyBuild version to what it was originally, to allow triggering deprecated behaviour.""" if 'EASYBUILD_DEPRECATED' in os.environ: