diff --git a/configure.py b/configure.py
index a0deed59c1f3df..466e5d0aa7dbac 100755
--- a/configure.py
+++ b/configure.py
@@ -12,11 +12,12 @@
 import shutil
 import bz2
 import io
+import pathlib

 from distutils.version import StrictVersion

 # If not run from node/, cd to node/.
-os.chdir(os.path.dirname(__file__) or '.')
+os.chdir(pathlib.Path(__file__).parent or '.')

 original_argv = sys.argv[1:]
@@ -25,11 +26,11 @@
 CC = os.environ.get('CC', 'cc' if sys.platform == 'darwin' else 'gcc')
 CXX = os.environ.get('CXX', 'c++' if sys.platform == 'darwin' else 'g++')

-sys.path.insert(0, os.path.join('tools', 'gyp', 'pylib'))
+sys.path.insert(0, str(pathlib.Path('tools', 'gyp', 'pylib')))
 from gyp.common import GetFlavor

 # imports in tools/configure.d
-sys.path.insert(0, os.path.join('tools', 'configure.d'))
+sys.path.insert(0, str(pathlib.Path('tools', 'configure.d')))
 import nodedownload

 # imports in tools/
@@ -53,7 +54,7 @@
 valid_mips_fpu = ('fp32', 'fp64', 'fpxx')
 valid_mips_float_abi = ('soft', 'hard')
 valid_intl_modes = ('none', 'small-icu', 'full-icu', 'system-icu')
-with open ('tools/icu/icu_versions.json') as f:
+with pathlib.Path('tools', 'icu', 'icu_versions.json').open(encoding='utf-8') as f:
   icu_versions = json.load(f)

 shareable_builtins = {'cjs_module_lexer/lexer': 'deps/cjs-module-lexer/lexer.js',
@@ -108,7 +109,7 @@
     action='store',
     dest='dest_cpu',
     choices=valid_arch,
-    help='CPU architecture to build for ({0})'.format(', '.join(valid_arch)))
+    help=f"CPU architecture to build for ({', '.join(valid_arch)})")

 parser.add_argument('--cross-compiling',
     action='store_true',
@@ -125,7 +126,7 @@
     action='store',
     dest='dest_os',
     choices=valid_os,
-    help='operating system to build for ({0})'.format(', '.join(valid_os)))
+    help=f"operating system to build for ({', '.join(valid_os)})")

 parser.add_argument('--error-on-warn',
     action='store_true',
@@ -510,39 +511,34 @@
     action='store',
     dest='arm_float_abi',
     choices=valid_arm_float_abi,
-    help='specifies which floating-point ABI to use ({0}).'.format(
-        ', '.join(valid_arm_float_abi)))
+    help=f"specifies which floating-point ABI to use ({', '.join(valid_arm_float_abi)}).")

 parser.add_argument('--with-arm-fpu',
     action='store',
     dest='arm_fpu',
     choices=valid_arm_fpu,
-    help='ARM FPU mode ({0}) [default: %(default)s]'.format(
-        ', '.join(valid_arm_fpu)))
+    help=f"ARM FPU mode ({', '.join(valid_arm_fpu)}) [default: %(default)s]")

 parser.add_argument('--with-mips-arch-variant',
     action='store',
     dest='mips_arch_variant',
     default='r2',
     choices=valid_mips_arch,
-    help='MIPS arch variant ({0}) [default: %(default)s]'.format(
-        ', '.join(valid_mips_arch)))
+    help=f"MIPS arch variant ({', '.join(valid_mips_arch)}) [default: %(default)s]")

 parser.add_argument('--with-mips-fpu-mode',
     action='store',
     dest='mips_fpu_mode',
     default='fp32',
     choices=valid_mips_fpu,
-    help='MIPS FPU mode ({0}) [default: %(default)s]'.format(
-        ', '.join(valid_mips_fpu)))
+    help=f"MIPS FPU mode ({', '.join(valid_mips_fpu)}) [default: %(default)s]")

 parser.add_argument('--with-mips-float-abi',
     action='store',
     dest='mips_float_abi',
     default='hard',
     choices=valid_mips_float_abi,
-    help='MIPS floating-point ABI ({0}) [default: %(default)s]'.format(
-        ', '.join(valid_mips_float_abi)))
+    help=f"MIPS floating-point ABI ({', '.join(valid_mips_float_abi)}) [default: %(default)s]")

 parser.add_argument('--use-largepages',
     action='store_true',
@@ -569,8 +565,7 @@
     dest='with_intl',
     default='full-icu',
     choices=valid_intl_modes,
-    help='Intl mode (valid choices: {0}) [default: %(default)s]'.format(
-        ', '.join(valid_intl_modes)))
+    help=f"Intl mode (valid choices: {', '.join(valid_intl_modes)}) [default: %(default)s]")

 intl_optgroup.add_argument('--without-intl',
     action='store_const',
@@ -597,7 +592,7 @@
     dest='with_icu_source',
     help='Intl mode: optional local path to icu/ dir, or path/URL of '
         'the icu4c source archive. '
-        'v%d.x or later recommended.' % icu_versions['minimum_icu'])
+        f"v{icu_versions['minimum_icu']}.x or later recommended.")

 intl_optgroup.add_argument('--with-icu-default-data-dir',
     action='store',
@@ -845,7 +840,7 @@
 (options, args) = parser.parse_known_args()

 # Expand ~ in the install prefix now, it gets written to multiple files.
-options.prefix = os.path.expanduser(options.prefix or '')
+options.prefix = str(pathlib.Path(options.prefix or '').expanduser())

 # set up auto-download list
 auto_downloads = nodedownload.parse(options.download_list)
@@ -853,25 +848,25 @@
 def error(msg):
   prefix = '\033[1m\033[31mERROR\033[0m' if os.isatty(1) else 'ERROR'
-  print('%s: %s' % (prefix, msg))
+  print(f'{prefix}: {msg}')
   sys.exit(1)

 def warn(msg):
   warn.warned = True
   prefix = '\033[1m\033[93mWARNING\033[0m' if os.isatty(1) else 'WARNING'
-  print('%s: %s' % (prefix, msg))
+  print(f'{prefix}: {msg}')

 # track if warnings occurred
 warn.warned = False

 def info(msg):
   prefix = '\033[1m\033[32mINFO\033[0m' if os.isatty(1) else 'INFO'
-  print('%s: %s' % (prefix, msg))
+  print(f'{prefix}: {msg}')

 def print_verbose(x):
   if not options.verbose:
     return
-  if type(x) is str:
+  if isinstance(x, str):
     print(x)
   else:
     pprint.pprint(x, indent=2)
@@ -904,9 +899,11 @@ def pkg_config(pkg):
     try:
       proc = subprocess.Popen(shlex.split(pkg_config) + args,
                               stdout=subprocess.PIPE)
-      val = to_utf8(proc.communicate()[0]).strip()
+      with proc:
+        val = to_utf8(proc.communicate()[0]).strip()
     except OSError as e:
-      if e.errno != errno.ENOENT: raise e  # Unexpected error.
+      if e.errno != errno.ENOENT:
+        raise e  # Unexpected error.
       return (None, None, None, None)  # No pkg-config/pkgconf installed.
     retval.append(val)
     args = ['--silence-errors']
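Note: the hunk above and several hunks below convert bare subprocess.Popen calls into `with proc:` blocks. A minimal, self-contained sketch of that pattern, using a placeholder `echo` command rather than configure.py's real pkg-config/compiler invocations:

import subprocess

def run_and_capture(cmd):
  # Popen objects are context managers: leaving the with-block closes the
  # child's pipes and waits for it, even if communicate() raises.
  with subprocess.Popen(cmd, stdout=subprocess.PIPE) as proc:
    out = proc.communicate()[0]
  return out.decode('utf-8').strip()

print(run_and_capture(['echo', 'hello']))  # -> hello
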
@@ -920,13 +917,14 @@ def try_check_compiler(cc, lang):
   except OSError:
     return (False, False, '', '')

-  proc.stdin.write(b'__clang__ __GNUC__ __GNUC_MINOR__ __GNUC_PATCHLEVEL__ '
-                   b'__clang_major__ __clang_minor__ __clang_patchlevel__')
+  with proc:
+    proc.stdin.write(b'__clang__ __GNUC__ __GNUC_MINOR__ __GNUC_PATCHLEVEL__ '
+                     b'__clang_major__ __clang_minor__ __clang_patchlevel__')

-  if sys.platform == 'zos':
-    values = (to_utf8(proc.communicate()[0]).split('\n')[-2].split() + ['0'] * 7)[0:7]
-  else:
-    values = (to_utf8(proc.communicate()[0]).split() + ['0'] * 7)[0:7]
+    if sys.platform == 'zos':
+      values = (to_utf8(proc.communicate()[0]).split('\n')[-2].split() + ['0'] * 7)[0:7]
+    else:
+      values = (to_utf8(proc.communicate()[0]).split() + ['0'] * 7)[0:7]

   is_clang = values[0] == '1'
   gcc_version = tuple(map(int, values[1:1+3]))
@@ -952,12 +950,10 @@ def get_version_helper(cc, regexp):
        consider adjusting the CC environment variable if you installed
        it in a non-standard prefix.''')

-  match = re.search(regexp, to_utf8(proc.communicate()[1]))
+  with proc:
+    match = re.search(regexp, to_utf8(proc.communicate()[1]))

-  if match:
-    return match.group(2)
-  else:
-    return '0.0'
+  return match.group(2) if match else '0.0'

 def get_nasm_version(asm):
   try:
@@ -970,13 +966,11 @@ def get_nasm_version(asm):
          and refer BUILDING.md.''')
     return '0.0'

-  match = re.match(r"NASM version ([2-9]\.[0-9][0-9]+)",
-                   to_utf8(proc.communicate()[0]))
+  with proc:
+    match = re.match(r"NASM version ([2-9]\.[0-9][0-9]+)",
+                     to_utf8(proc.communicate()[0]))

-  if match:
-    return match.group(1)
-  else:
-    return '0.0'
+  return match.group(1) if match else '0.0'

 def get_llvm_version(cc):
   return get_version_helper(
@@ -1002,14 +996,16 @@ def get_gas_version(cc):
        consider adjusting the CC environment variable if you installed
        it in a non-standard prefix.''')

-  gas_ret = to_utf8(proc.communicate()[1])
+  with proc:
+    gas_ret = to_utf8(proc.communicate()[1])
+
   match = re.match(r"GNU assembler version ([2-9]\.[0-9]+)", gas_ret)

   if match:
     return match.group(1)
-  else:
-    warn('Could not recognize `gas`: ' + gas_ret)
-    return '0.0'
+
+  warn(f'Could not recognize `gas`: {gas_ret}')
+  return '0.0'

 # Note: Apple clang self-reports as clang 4.2.0 and gcc 4.2.1.  It passes
 # the version check more by accident than anything else but a more rigorous
@@ -1027,26 +1023,22 @@ def check_compiler(o):
   ok, is_clang, clang_version, gcc_version = try_check_compiler(CXX, 'c++')
   version_str = ".".join(map(str, clang_version if is_clang else gcc_version))
-  print_verbose('Detected %sC++ compiler (CXX=%s) version: %s' %
-                ('clang ' if is_clang else '', CXX, version_str))
+  print_verbose(f"Detected {'clang ' if is_clang else ''}C++ compiler (CXX={CXX}) version: {version_str}")
   if not ok:
-    warn('failed to autodetect C++ compiler version (CXX=%s)' % CXX)
+    warn(f'failed to autodetect C++ compiler version (CXX={CXX})')
   elif clang_version < (8, 0, 0) if is_clang else gcc_version < (10, 1, 0):
-    warn('C++ compiler (CXX=%s, %s) too old, need g++ 10.1.0 or clang++ 8.0.0' %
-         (CXX, version_str))
+    warn(f'C++ compiler (CXX={CXX}, {version_str}) too old, need g++ 10.1.0 or clang++ 8.0.0')

   ok, is_clang, clang_version, gcc_version = try_check_compiler(CC, 'c')
   version_str = ".".join(map(str, clang_version if is_clang else gcc_version))
-  print_verbose('Detected %sC compiler (CC=%s) version: %s' %
-                ('clang ' if is_clang else '', CC, version_str))
+  print_verbose(f"Detected {'clang ' if is_clang else ''}C compiler (CC={CC}) version: {version_str}")
   if not ok:
-    warn('failed to autodetect C compiler version (CC=%s)' % CC)
+    warn(f'failed to autodetect C compiler version (CC={CC})')
   elif not is_clang and gcc_version < (4, 2, 0):
     # clang 3.2 is a little white lie because any clang version will probably
     # do for the C bits. However, we might as well encourage people to upgrade
     # to a version that is not completely ancient.
-    warn('C compiler (CC=%s, %s) too old, need gcc 4.2 or clang 3.2' %
-         (CC, version_str))
+    warn(f'C compiler (CC={CC}, {version_str}) too old, need gcc 4.2 or clang 3.2')

   o['variables']['llvm_version'] = get_llvm_version(CC) if is_clang else '0.0'
@@ -1076,8 +1068,9 @@ def cc_macros(cc=None):
        consider adjusting the CC environment variable if you installed
        it in a non-standard prefix.''')

-  p.stdin.write(b'\n')
-  out = to_utf8(p.communicate()[0]).split('\n')
+  with p:
+    p.stdin.write(b'\n')
+    out = to_utf8(p.communicate()[0]).split('\n')

   k = {}
   for line in out:
@@ -1134,9 +1127,9 @@ def host_arch_cc():

   rtn = 'ia32' # default

-  for i in matchup:
-    if i in k and k[i] != '0':
-      rtn = matchup[i]
+  for key, value in matchup.items():
+    if k.get(key, 0) and k[key] != '0':
+      rtn = value
       break

   if rtn == 'mipsel' and '_LP64' in k:
@@ -1195,7 +1188,7 @@ def configure_arm(o):


 def configure_mips(o, target_arch):
-  can_use_fpu_instructions = (options.mips_float_abi != 'soft')
+  can_use_fpu_instructions = options.mips_float_abi != 'soft'
   o['variables']['v8_can_use_fpu_instructions'] = b(can_use_fpu_instructions)
   o['variables']['v8_use_mips_abi_hardfloat'] = b(can_use_fpu_instructions)
   o['variables']['mips_arch_variant'] = options.mips_arch_variant
@@ -1207,14 +1200,14 @@ def configure_zos(o):
   o['variables']['node_static_zoslib'] = b(True)
   if options.static_zoslib_gyp:
     # Apply to all Node.js components for now
-    o['variables']['zoslib_include_dir'] = os.path.dirname(options.static_zoslib_gyp) + '/include'
+    o['variables']['zoslib_include_dir'] = str(pathlib.Path(options.static_zoslib_gyp).parent) + '/include'
     o['include_dirs'] += [o['variables']['zoslib_include_dir']]
   else:
     raise Exception('--static-zoslib-gyp= is required.')

 def clang_version_ge(version_checked):
   for compiler in [(CC, 'c'), (CXX, 'c++')]:
-    ok, is_clang, clang_version, gcc_version = \
+    _, is_clang, clang_version, _1 = \
       try_check_compiler(compiler[0], compiler[1])
     if is_clang and clang_version >= version_checked:
       return True
@@ -1222,7 +1215,7 @@ def clang_version_ge(version_checked):
   return False

 def gcc_version_ge(version_checked):
   for compiler in [(CC, 'c'), (CXX, 'c++')]:
-    ok, is_clang, clang_version, gcc_version = \
+    _, is_clang, _1, gcc_version = \
       try_check_compiler(compiler[0], compiler[1])
     if is_clang or gcc_version < version_checked:
       return False
@@ -1324,7 +1317,7 @@ def configure_node(o):
       version_checked_str = ".".join(map(str, version_checked))
       raise Exception(
         'The options --enable-pgo-generate and --enable-pgo-use '
-        'are supported for gcc and gxx %s or newer only.' % (version_checked_str))
+        f'are supported for gcc and gxx {version_checked_str} or newer only.')

   if options.enable_pgo_generate and options.enable_pgo_use:
     raise Exception(
@@ -1347,8 +1340,8 @@ def configure_node(o):
       gcc_version_checked_str = ".".join(map(str, gcc_version_checked))
       clang_version_checked_str = ".".join(map(str, clang_version_checked))
       raise Exception(
-        'The option --enable-lto is supported for gcc %s+'
-        'or clang %s+ only.' % (gcc_version_checked_str, clang_version_checked_str))
+        f'The option --enable-lto is supported for gcc {gcc_version_checked_str}+'
+        f'or clang {clang_version_checked_str}+ only.')

   o['variables']['enable_lto'] = b(options.enable_lto)
@@ -1458,15 +1451,15 @@ def configure_library(lib, output, pkgname=None):
       if 'msvs_settings' not in output:
         output['msvs_settings'] = { 'VCLinkerTool': { 'AdditionalOptions': [] } }
       output['msvs_settings']['VCLinkerTool']['AdditionalOptions'] += [
-        '/LIBPATH:%s' % options.__dict__[shared_lib + '_libpath']]
+        f"/LIBPATH:{options.__dict__[shared_lib + '_libpath']}"]
     else:
       output['libraries'] += [
-        '-L%s' % options.__dict__[shared_lib + '_libpath']]
+        f"-L{options.__dict__[shared_lib + '_libpath']}"]
   elif pkg_libpath:
     output['libraries'] += [pkg_libpath]

   default_libs = getattr(options, shared_lib + '_libname')
-  default_libs = ['-l{0}'.format(l) for l in default_libs.split(',')]
+  default_libs = [f'-l{l}' for l in default_libs.split(',')]

   if default_libs:
     output['libraries'] += default_libs
@@ -1528,7 +1521,7 @@ def configure_openssl(o):

   if options.without_ssl:
     def without_ssl_error(option):
-      error('--without-ssl is incompatible with %s' % option)
+      error(f'--without-ssl is incompatible with {option}')
     if options.shared_openssl:
       without_ssl_error('--shared-openssl')
     if options.openssl_no_asm:
@@ -1608,35 +1601,35 @@ def configure_static(o):

 def write(filename, data):
-  print_verbose('creating %s' % filename)
-  with open(filename, 'w+') as f:
+  print_verbose(f'creating {filename}')
+  with pathlib.Path(filename).open(mode='w+', encoding='utf-8') as f:
     f.write(data)

 do_not_edit = '# Do not edit. Generated by the configure script.\n'

 def glob_to_var(dir_base, dir_sub, patch_dir):
-  list = []
-  dir_all = '%s/%s' % (dir_base, dir_sub)
+  file_list = []
+  dir_all = f'{dir_base}/{dir_sub}'
   files = os.walk(dir_all)
   for ent in files:
-    (path, dirs, files) = ent
+    (_, _1, files) = ent
     for file in files:
       if file.endswith(('.cpp', '.c', '.h')):
         # srcfile uses "slash" as dir separator as its output is consumed by gyp
-        srcfile = '%s/%s' % (dir_sub, file)
+        srcfile = f'{dir_sub}/{file}'
         if patch_dir:
-          patchfile = '%s/%s/%s' % (dir_base, patch_dir, file)
-          if os.path.isfile(patchfile):
-            srcfile = '%s/%s' % (patch_dir, file)
-            info('Using floating patch "%s" from "%s"' % (patchfile, dir_base))
-        list.append(srcfile)
+          patchfile = pathlib.Path(dir_base, patch_dir, file)
+          if patchfile.is_file():
+            srcfile = f'{patch_dir}/{file}'
+            info(f'Using floating patch "{patchfile}" from "{dir_base}"')
+        file_list.append(srcfile)
     break
-  return list
+  return file_list

 def configure_intl(o):
-  def icu_download(path):
-    depFile = 'tools/icu/current_ver.dep'
-    with open(depFile) as f:
+  def icu_download():
+    depFile = pathlib.Path('tools', 'icu', 'current_ver.dep')
+    with depFile.open(encoding='utf-8') as f:
       icus = json.load(f)
     # download ICU, if needed
     if not os.access(options.download_path, os.W_OK):
@@ -1647,26 +1640,26 @@ def icu_download(path):
       url = icu['url']
       (expectHash, hashAlgo, allAlgos) = nodedownload.findHash(icu)
       if not expectHash:
-        error('''Could not find a hash to verify ICU download.
-          %s may be incorrect.
-          For the entry %s,
-          Expected one of these keys: %s''' % (depFile, url, ' '.join(allAlgos)))
+        error(f'''Could not find a hash to verify ICU download.
+          {depFile} may be incorrect.
+          For the entry {url},
+          Expected one of these keys: {' '.join(allAlgos)}''')
       local = url.split('/')[-1]
-      targetfile = os.path.join(options.download_path, local)
-      if not os.path.isfile(targetfile):
+      targetfile = pathlib.Path(options.download_path, local)
+      if not targetfile.is_file():
         if attemptdownload:
           nodedownload.retrievefile(url, targetfile)
       else:
-        print('Re-using existing %s' % targetfile)
-      if os.path.isfile(targetfile):
-        print('Checking file integrity with %s:\r' % hashAlgo)
+        print(f'Re-using existing {targetfile}')
+      if targetfile.is_file():
+        print(f'Checking file integrity with {hashAlgo}:\r')
         gotHash = nodedownload.checkHash(targetfile, hashAlgo)
-        print('%s: %s %s' % (hashAlgo, gotHash, targetfile))
-        if (expectHash == gotHash):
+        print(f'{hashAlgo}: {gotHash} {targetfile}')
+        if expectHash == gotHash:
           return targetfile
-        else:
-          warn('Expected: %s *MISMATCH*' % expectHash)
-          warn('\n ** Corrupted ZIP? Delete %s to retry download.\n' % targetfile)
+
+        warn(f'Expected: {expectHash} *MISMATCH*')
+        warn(f'\n ** Corrupted ZIP? Delete {targetfile} to retry download.\n')
     return None

   icu_config = {
     'variables': {}
@@ -1694,12 +1687,14 @@ def icu_download(path):
     # use the .gyp given
     o['variables']['icu_gyp_path'] = options.with_icu_path
     return
+
   # --with-intl=
   # set the default
   if with_intl in (None, 'none'):
     o['variables']['v8_enable_i18n_support'] = 0
     return  # no Intl
-  elif with_intl == 'small-icu':
+
+  if with_intl == 'small-icu':
     # small ICU (English only)
     o['variables']['v8_enable_i18n_support'] = 1
     o['variables']['icu_small'] = b(True)
@@ -1722,8 +1717,7 @@ def icu_download(path):
     icu_ver_major = icuversion.split('.')[0]
     o['variables']['icu_ver_major'] = icu_ver_major
     if int(icu_ver_major) < icu_versions['minimum_icu']:
-      error('icu4c v%s is too old, v%d.x or later is required.' %
-            (icuversion, icu_versions['minimum_icu']))
+      error(f"icu4c v{icuversion} is too old, v{icu_versions['minimum_icu']}.x or later is required.")
     # libpath provides linker path which may contain spaces
     if libpath:
       o['libraries'] += [libpath]
@@ -1744,16 +1738,16 @@ def icu_download(path):
   icu_full_path = icu_deps_path

   # icu-tmp is used to download and unpack the ICU tarball.
-  icu_tmp_path = os.path.join(icu_parent_path, 'icu-tmp')
+  icu_tmp_path = pathlib.Path(icu_parent_path, 'icu-tmp')

   # canned ICU. see tools/icu/README.md to update.
   canned_icu_dir = 'deps/icu-small'

   # use the README to verify what the canned ICU is
-  canned_is_full = os.path.isfile(os.path.join(canned_icu_dir, 'README-FULL-ICU.txt'))
-  canned_is_small = os.path.isfile(os.path.join(canned_icu_dir, 'README-SMALL-ICU.txt'))
+  canned_is_full = pathlib.Path(canned_icu_dir, 'README-FULL-ICU.txt').is_file()
+  canned_is_small = pathlib.Path(canned_icu_dir, 'README-SMALL-ICU.txt').is_file()
   if canned_is_small:
-    warn('Ignoring %s - in-repo small icu is no longer supported.' % canned_icu_dir)
+    warn(f'Ignoring {canned_icu_dir} - in-repo small icu is no longer supported.')

   # We can use 'deps/icu-small' - pre-canned ICU *iff*
   # - canned_is_full AND
@@ -1771,66 +1765,65 @@ def icu_download(path):
     icu_config['variables']['icu_full_canned'] = 1
   # --with-icu-source processing
   # now, check that they didn't pass --with-icu-source=deps/icu
-  elif with_icu_source and os.path.abspath(icu_full_path) == os.path.abspath(with_icu_source):
-    warn('Ignoring redundant --with-icu-source=%s' % with_icu_source)
+  elif with_icu_source and pathlib.Path(icu_full_path).resolve() == pathlib.Path(with_icu_source).resolve():
+    warn(f'Ignoring redundant --with-icu-source={with_icu_source}')
     with_icu_source = None
   # if with_icu_source is still set, try to use it.
   if with_icu_source:
-    if os.path.isdir(icu_full_path):
-      print('Deleting old ICU source: %s' % icu_full_path)
+    if pathlib.Path(icu_full_path).is_dir():
+      print(f'Deleting old ICU source: {icu_full_path}')
       shutil.rmtree(icu_full_path)
     # now, what path was given?
-    if os.path.isdir(with_icu_source):
+    if pathlib.Path(with_icu_source).is_dir():
       # it's a path. Copy it.
-      print('%s -> %s' % (with_icu_source, icu_full_path))
+      print(f'{with_icu_source} -> {icu_full_path}')
       shutil.copytree(with_icu_source, icu_full_path)
     else:
       # could be file or URL.
       # Set up temporary area
-      if os.path.isdir(icu_tmp_path):
+      if icu_tmp_path.is_dir():
         shutil.rmtree(icu_tmp_path)
-      os.mkdir(icu_tmp_path)
+      icu_tmp_path.mkdir()
       icu_tarball = None
-      if os.path.isfile(with_icu_source):
+      if pathlib.Path(with_icu_source).is_file():
        # it's a file. Try to unpack it.
         icu_tarball = with_icu_source
       else:
         # Can we download it?
-        local = os.path.join(icu_tmp_path, with_icu_source.split('/')[-1])  # local part
+        local = icu_tmp_path / with_icu_source.split('/')[-1]  # local part
         icu_tarball = nodedownload.retrievefile(with_icu_source, local)
       # continue with "icu_tarball"
       nodedownload.unpack(icu_tarball, icu_tmp_path)
       # Did it unpack correctly? Should contain 'icu'
-      tmp_icu = os.path.join(icu_tmp_path, 'icu')
-      if os.path.isdir(tmp_icu):
-        os.rename(tmp_icu, icu_full_path)
+      tmp_icu = icu_tmp_path / 'icu'
+      if tmp_icu.is_dir():
+        tmp_icu.rename(icu_full_path)
         shutil.rmtree(icu_tmp_path)
       else:
         shutil.rmtree(icu_tmp_path)
-        error('--with-icu-source=%s did not result in an "icu" dir.' % \
-               with_icu_source)
+        error(f'--with-icu-source={with_icu_source} did not result in an "icu" dir.')

   # ICU mode. (icu-generic.gyp)
   o['variables']['icu_gyp_path'] = 'tools/icu/icu-generic.gyp'
   # ICU source dir relative to tools/icu (for .gyp file)
   o['variables']['icu_path'] = icu_full_path
-  if not os.path.isdir(icu_full_path):
+  if not pathlib.Path(icu_full_path).is_dir():
     # can we download (or find) a zipfile?
-    localzip = icu_download(icu_full_path)
+    localzip = icu_download()
     if localzip:
       nodedownload.unpack(localzip, icu_parent_path)
     else:
-      warn('* ECMA-402 (Intl) support didn\'t find ICU in %s..' % icu_full_path)
+      warn(f"* ECMA-402 (Intl) support didn't find ICU in {icu_full_path}..")
-  if not os.path.isdir(icu_full_path):
-    error('''Cannot build Intl without ICU in %s.
-       Fix, or disable with "--with-intl=none"''' % icu_full_path)
+  if not pathlib.Path(icu_full_path).is_dir():
+    error(f'''Cannot build Intl without ICU in {icu_full_path}.
+       Fix, or disable with "--with-intl=none"''')
   else:
-    print_verbose('* Using ICU in %s' % icu_full_path)
+    print_verbose(f'* Using ICU in {icu_full_path}')
   # Now, what version of ICU is it? We just need the "major", such as 54.
   # uvernum.h contains it as a #define.
-  uvernum_h = os.path.join(icu_full_path, 'source/common/unicode/uvernum.h')
-  if not os.path.isfile(uvernum_h):
-    error('Could not load %s - is ICU installed?' % uvernum_h)
+  uvernum_h = pathlib.Path(icu_full_path, 'source', 'common', 'unicode', 'uvernum.h')
+  if not uvernum_h.is_file():
+    error(f'Could not load {uvernum_h} - is ICU installed?')
   icu_ver_major = None
   matchVerExp = r'^\s*#define\s+U_ICU_VERSION_SHORT\s+"([^"]*)".*'
   match_version = re.compile(matchVerExp)
@@ -1840,27 +1833,24 @@ def icu_download(path):
     if m:
       icu_ver_major = str(m.group(1))
   if not icu_ver_major:
-    error('Could not read U_ICU_VERSION_SHORT version from %s' % uvernum_h)
+    error(f'Could not read U_ICU_VERSION_SHORT version from {uvernum_h}')
   elif int(icu_ver_major) < icu_versions['minimum_icu']:
-    error('icu4c v%s.x is too old, v%d.x or later is required.' %
-          (icu_ver_major, icu_versions['minimum_icu']))
+    error(f"icu4c v{icu_ver_major}.x is too old, v{icu_versions['minimum_icu']}.x or later is required.")
   icu_endianness = sys.byteorder[0]
   o['variables']['icu_ver_major'] = icu_ver_major
   o['variables']['icu_endianness'] = icu_endianness
-  icu_data_file_l = 'icudt%s%s.dat' % (icu_ver_major, 'l')  # LE filename
-  icu_data_file = 'icudt%s%s.dat' % (icu_ver_major, icu_endianness)
+  icu_data_file_l = f'icudt{icu_ver_major}l.dat'  # LE filename
+  icu_data_file = f'icudt{icu_ver_major}{icu_endianness}.dat'
   # relative to configure
-  icu_data_path = os.path.join(icu_full_path,
-                               'source/data/in',
-                               icu_data_file_l)  # LE
-  compressed_data = '%s.bz2' % (icu_data_path)
-  if not os.path.isfile(icu_data_path) and os.path.isfile(compressed_data):
+  icu_data_path = pathlib.Path(icu_full_path, 'source', 'data', 'in', icu_data_file_l)  # LE
+  compressed_data = f'{icu_data_path}.bz2'
+  if not icu_data_path.is_file() and pathlib.Path(compressed_data).is_file():
     # unpack. deps/icu is a temporary path
-    if os.path.isdir(icu_tmp_path):
+    if icu_tmp_path.is_dir():
       shutil.rmtree(icu_tmp_path)
-    os.mkdir(icu_tmp_path)
-    icu_data_path = os.path.join(icu_tmp_path, icu_data_file_l)
-    with open(icu_data_path, 'wb') as outf:
+    icu_tmp_path.mkdir()
+    icu_data_path = icu_tmp_path / icu_data_file_l
+    with icu_data_path.open(mode='wb') as outf:
       inf = bz2.BZ2File(compressed_data, 'rb')
       try:
         shutil.copyfileobj(inf, outf)
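Note: the ICU hunks above replace os.path.join/os.path.isfile/open with their pathlib equivalents. A small illustrative sketch of those equivalences (the paths below are hypothetical, not the real ICU layout):

import pathlib

data_dir = pathlib.Path('deps', 'icu', 'source', 'data', 'in')  # os.path.join(...)
data_file = data_dir / 'icudt99l.dat'                           # join one more segment with /

if data_file.is_file():                  # os.path.isfile(...)
  with data_file.open(mode='rb') as f:   # open(path, 'rb')
    payload = f.read()
else:
  print(f'{data_file} not found')        # Path objects render cleanly in f-strings
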
@@ -1869,20 +1859,18 @@ def icu_download(path):
   # Now, proceed..

   # relative to dep..
-  icu_data_in = os.path.join('..','..', icu_data_path)
-  if not os.path.isfile(icu_data_path) and icu_endianness != 'l':
+  icu_data_in = pathlib.Path('..', '..', icu_data_path)
+  if not icu_data_path.is_file() and icu_endianness != 'l':
     # use host endianness
-    icu_data_path = os.path.join(icu_full_path,
-                                 'source/data/in',
-                                 icu_data_file)  # will be generated
-  if not os.path.isfile(icu_data_path):
+    icu_data_path = pathlib.Path(icu_full_path, 'source', 'data', 'in', icu_data_file)  # will be generated
+  if not icu_data_path.is_file():
     # .. and we're not about to build it from .gyp!
-    error('''ICU prebuilt data file %s does not exist.
-       See the README.md.''' % icu_data_path)
+    error(f'''ICU prebuilt data file {icu_data_path} does not exist.
+       See the README.md.''')
   # this is the input '.dat' file to use .. icudt*.dat
   # may be little-endian if from a icu-project.org tarball
-  o['variables']['icu_data_in'] = icu_data_in
+  o['variables']['icu_data_in'] = str(icu_data_in)

   # map from variable name to subdirs
   icu_src = {
@@ -1896,10 +1884,10 @@ def icu_download(path):
   }
   # this creates a variable icu_src_XXX for each of the subdirs
   # with a list of the src files to use
-  for i in icu_src:
-    var = 'icu_src_%s' % i
-    path = '../../%s/source/%s' % (icu_full_path, icu_src[i])
-    icu_config['variables'][var] = glob_to_var('tools/icu', path, 'patches/%s/source/%s' % (icu_ver_major, icu_src[i]) )
+  for key, value in icu_src.items():
+    var = f'icu_src_{key}'
+    path = f'../../{icu_full_path}/source/{value}'
+    icu_config['variables'][var] = glob_to_var('tools/icu', path, f'patches/{icu_ver_major}/source/{value}')
   # calculate platform-specific genccode args
   # print("platform %s, flavor %s" % (sys.platform, flavor))
   # if sys.platform == 'darwin':
@@ -1950,8 +1938,9 @@ def configure_section_file(o):
     warn('''No acceptable ld.gold linker found!''')
     return 0

-  match = re.match(r"^GNU gold.*([0-9]+)\.([0-9]+)$",
-                   proc.communicate()[0].decode("utf-8"))
+  with proc:
+    match = re.match(r"^GNU gold.*([0-9]+)\.([0-9]+)$",
+                     proc.communicate()[0].decode("utf-8"))

   if match:
     gold_major_version = match.group(1)
@@ -1979,17 +1968,19 @@ def make_bin_override():
       os.path.realpath(which_python) == os.path.realpath(sys.executable)):
     return

-  bin_override = os.path.abspath('out/tools/bin')
+  bin_override = pathlib.Path('out', 'tools', 'bin').resolve()
   try:
-    os.makedirs(bin_override)
+    bin_override.mkdir(parents=True)
   except OSError as e:
-    if e.errno != errno.EEXIST: raise e
+    if e.errno != errno.EEXIST:
+      raise e

-  python_link = os.path.join(bin_override, 'python')
+  python_link = bin_override / 'python'
   try:
-    os.unlink(python_link)
+    python_link.unlink()
   except OSError as e:
-    if e.errno != errno.ENOENT: raise e
+    if e.errno != errno.ENOENT:
+      raise e
   os.symlink(sys.executable, python_link)

   # We need to set the environment right now so that when gyp (in run_gyp)
@@ -2013,7 +2004,7 @@ def make_bin_override():

 # determine the "flavor" (operating system) we're building for,
 # leveraging gyp's GetFlavor function
 flavor_params = {}
-if (options.dest_os):
+if options.dest_os:
   flavor_params['flavor'] = options.dest_os
 flavor = GetFlavor(flavor_params)
@@ -2037,12 +2028,12 @@ def make_bin_override():
 # configure shareable builtins
 output['variables']['node_builtin_shareable_builtins'] = []
-for builtin in shareable_builtins:
+for builtin, value in shareable_builtins.items():
   builtin_id = 'node_shared_builtin_' + builtin.replace('/', '_') + '_path'
   if getattr(options, builtin_id):
     output['defines'] += [builtin_id.upper() + '=' +
                           getattr(options, builtin_id)]
   else:
-    output['variables']['node_builtin_shareable_builtins'] += [shareable_builtins[builtin]]
+    output['variables']['node_builtin_shareable_builtins'] += [value]

 # Forward OSS-Fuzz settings
 output['variables']['ossfuzz'] = b(options.ossfuzz)
@@ -2074,7 +2065,7 @@ def make_bin_override():
 write('config.status', '#!/bin/sh\nset -x\nexec ./configure ' +
       ' '.join([shlex.quote(arg) for arg in original_argv]) + '\n')
-os.chmod('config.status', 0o775)
+pathlib.Path('config.status').chmod(0o775)

 config = {
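Note: several hunks wrap Path values back into str() before they reach gyp variables or generated files (sys.path entries, options.prefix, icu_data_in), presumably because those consumers expect plain strings. A hedged sketch of that boundary, with illustrative names only:

import json
import pathlib

variables = {}  # stand-in for the gyp 'variables' dict built by configure.py
include_dir = pathlib.Path('out', 'tools') / 'include'
variables['include_dir'] = str(include_dir)  # convert at the string boundary

print(json.dumps(variables, indent=2))  # json.dumps would reject a raw Path object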