diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b29ba4260c..fb922265c4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -58,7 +58,7 @@ repos: # auto format Python codes within docstrings - id: blacken-docs - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.4.1 + rev: v0.4.2 hooks: # lint & attempt to correct failures (e.g. pyupgrade) - id: ruff diff --git a/conda_build/_link.py b/conda_build/_link.py index af841c0275..e8984fcd37 100644 --- a/conda_build/_link.py +++ b/conda_build/_link.py @@ -26,7 +26,7 @@ SITE_PACKAGES = "Lib/site-packages" else: BIN_DIR = join(PREFIX, "bin") - SITE_PACKAGES = "lib/python%s/site-packages" % sys.version[:3] + SITE_PACKAGES = f"lib/python{sys.version[:3]}/site-packages" # the list of these files is going to be store in info/_files FILES = [] @@ -110,20 +110,20 @@ def create_script(fn): dst = join(BIN_DIR, fn) if sys.platform == "win32": shutil.copy2(src, dst + "-script.py") - FILES.append("Scripts/%s-script.py" % fn) + FILES.append(f"Scripts/{fn}-script.py") shutil.copy2( join(THIS_DIR, "cli-%d.exe" % (8 * tuple.__itemsize__)), dst + ".exe" ) - FILES.append("Scripts/%s.exe" % fn) + FILES.append(f"Scripts/{fn}.exe") else: with open(src) as fi: data = fi.read() with open(dst, "w") as fo: - shebang = replace_long_shebang("#!%s\n" % normpath(sys.executable)) + shebang = replace_long_shebang(f"#!{normpath(sys.executable)}\n") fo.write(shebang) fo.write(data) os.chmod(dst, 0o775) - FILES.append("bin/%s" % fn) + FILES.append(f"bin/{fn}") def create_scripts(files): @@ -140,9 +140,9 @@ def main(): link_files("site-packages", SITE_PACKAGES, DATA["site-packages"]) link_files("Examples", "Examples", DATA["Examples"]) - with open(join(PREFIX, "conda-meta", "%s.files" % DATA["dist"]), "w") as fo: + with open(join(PREFIX, "conda-meta", "{}.files".format(DATA["dist"])), "w") as fo: for f in FILES: - fo.write("%s\n" % f) + fo.write(f"{f}\n") if __name__ == "__main__": diff --git a/conda_build/api.py b/conda_build/api.py index 571f08f534..cc866a865d 100644 --- a/conda_build/api.py +++ b/conda_build/api.py @@ -418,7 +418,7 @@ def convert( "Conversion from wheel packages is not implemented yet, stay tuned." 
) else: - raise RuntimeError("cannot convert: %s" % package_file) + raise RuntimeError(f"cannot convert: {package_file}") def test_installable(channel: str = "defaults") -> bool: diff --git a/conda_build/build.py b/conda_build/build.py index f1bf8eec02..6dd2b49256 100644 --- a/conda_build/build.py +++ b/conda_build/build.py @@ -774,7 +774,7 @@ def copy_readme(m): if readme: src = join(m.config.work_dir, readme) if not isfile(src): - sys.exit("Error: no readme file: %s" % readme) + sys.exit(f"Error: no readme file: {readme}") dst = join(m.config.info_dir, readme) utils.copy_into(src, dst, m.config.timeout, locking=m.config.locking) if os.path.split(readme)[1] not in {"README.md", "README.rst", "README"}: @@ -1187,7 +1187,7 @@ def record_prefix_files(m, files_with_prefix): if fn in text_has_prefix_files: text_has_prefix_files.remove(fn) else: - ignored_because = " (not in build/%s_has_prefix_files)" % (mode) + ignored_because = f" (not in build/{mode}_has_prefix_files)" print( "{fn} ({mode}): {action}{reason}".format( @@ -1204,10 +1204,10 @@ def record_prefix_files(m, files_with_prefix): # make sure we found all of the files expected errstr = "" for f in text_has_prefix_files: - errstr += "Did not detect hard-coded path in %s from has_prefix_files\n" % f + errstr += f"Did not detect hard-coded path in {f} from has_prefix_files\n" for f in binary_has_prefix_files: errstr += ( - "Did not detect hard-coded path in %s from binary_has_prefix_files\n" % f + f"Did not detect hard-coded path in {f} from binary_has_prefix_files\n" ) if errstr: raise RuntimeError(errstr) @@ -1276,7 +1276,7 @@ def write_about_json(m): with open(join(m.config.info_dir, "about.json"), "w") as fo: d = {} for key, default in FIELDS["about"].items(): - value = m.get_value("about/%s" % key) + value = m.get_value(f"about/{key}") if value: d[key] = value if default is list: @@ -1332,7 +1332,7 @@ def write_info_json(m: MetaData): "# $ conda create --name --file " ) for dist in sorted(runtime_deps + [" ".join(m.dist().rsplit("-", 2))]): - fo.write("%s\n" % "=".join(dist.split())) + fo.write("{}\n".format("=".join(dist.split()))) mode_dict = {"mode": "w", "encoding": "utf-8"} with open(join(m.config.info_dir, "index.json"), **mode_dict) as fo: @@ -1355,10 +1355,10 @@ def get_entry_point_script_names(entry_point_scripts): for entry_point in entry_point_scripts: cmd = entry_point[: entry_point.find("=")].strip() if utils.on_win: - scripts.append("Scripts\\%s-script.py" % cmd) - scripts.append("Scripts\\%s.exe" % cmd) + scripts.append(f"Scripts\\{cmd}-script.py") + scripts.append(f"Scripts\\{cmd}.exe") else: - scripts.append("bin/%s" % cmd) + scripts.append(f"bin/{cmd}") return scripts @@ -1520,7 +1520,7 @@ def _recurse_symlink_to_size(path, seen=None): return _recurse_symlink_to_size(dest, seen=seen) elif not isfile(dest): # this is a symlink that points to nowhere, so is zero bytes - warnings.warn("file %s is a symlink with no target" % path, UserWarning) + warnings.warn(f"file {path} is a symlink with no target", UserWarning) return 0 return 0 @@ -1764,8 +1764,7 @@ def bundle_conda(output, metadata: MetaData, env, stats, **kw): var = var.split("=", 1)[0] elif var not in os.environ: warnings.warn( - "The environment variable '%s' specified in script_env is undefined." 
- % var, + f"The environment variable '{var}' specified in script_env is undefined.", UserWarning, ) val = "" @@ -3295,9 +3294,9 @@ def test( os.path.dirname(prefix), "_".join( ( - "%s_prefix_moved" % name, + f"{name}_prefix_moved", metadata.dist(), - getattr(metadata.config, "%s_subdir" % name), + getattr(metadata.config, f"{name}_subdir"), ) ), ) diff --git a/conda_build/cli/main_render.py b/conda_build/cli/main_render.py index a5cbb8b443..6e6f2bfa41 100644 --- a/conda_build/cli/main_render.py +++ b/conda_build/cli/main_render.py @@ -66,7 +66,7 @@ def get_render_parser() -> ArgumentParser: "--version", action="version", help="Show the conda-build version number and exit.", - version="conda-build %s" % __version__, + version=f"conda-build {__version__}", ) p.add_argument( "-n", diff --git a/conda_build/create_test.py b/conda_build/create_test.py index 1a8a0f1c34..441fe4a17c 100644 --- a/conda_build/create_test.py +++ b/conda_build/create_test.py @@ -122,7 +122,7 @@ def _create_test_files( fo.write( f"{comment_char} tests for {m.dist()} (this is a generated file);\n" ) - fo.write("print('===== testing package: %s =====');\n" % m.dist()) + fo.write(f"print('===== testing package: {m.dist()} =====');\n") try: with open(test_file) as fi: @@ -134,7 +134,7 @@ def _create_test_files( fo.write( "# tests were not packaged with this module, and cannot be run\n" ) - fo.write("\nprint('===== %s OK =====');\n" % m.dist()) + fo.write(f"\nprint('===== {m.dist()} OK =====');\n") return ( out_file, bool(name) and isfile(out_file) and basename(test_file) != "no-file", @@ -175,8 +175,8 @@ def create_py_files(m: MetaData, test_dir: os.PathLike) -> bool: if imports: with open(tf, "a") as fo: for name in imports: - fo.write('print("import: %r")\n' % name) - fo.write("import %s\n" % name) + fo.write(f'print("import: {name!r}")\n') + fo.write(f"import {name}\n") fo.write("\n") return tf if (tf_exists or imports) else False @@ -202,8 +202,8 @@ def create_r_files(m: MetaData, test_dir: os.PathLike) -> bool: if imports: with open(tf, "a") as fo: for name in imports: - fo.write('print("library(%r)")\n' % name) - fo.write("library(%s)\n" % name) + fo.write(f'print("library({name!r})")\n') + fo.write(f"library({name})\n") fo.write("\n") return tf if (tf_exists or imports) else False @@ -225,11 +225,13 @@ def create_pl_files(m: MetaData, test_dir: os.PathLike) -> bool: break if tf_exists or imports: with open(tf, "a") as fo: - print(r'my $expected_version = "%s";' % m.version().rstrip("0"), file=fo) + print( + r'my $expected_version = "{}";'.format(m.version().rstrip("0")), file=fo + ) if imports: for name in imports: - print(r'print("import: %s\n");' % name, file=fo) - print("use %s;\n" % name, file=fo) + print(rf'print("import: {name}\n");', file=fo) + print(f"use {name};\n", file=fo) # Don't try to print version for complex imports if " " not in name: print( @@ -264,8 +266,8 @@ def create_lua_files(m: MetaData, test_dir: os.PathLike) -> bool: if imports: with open(tf, "a+") as fo: for name in imports: - print(r'print("require \"%s\"\n");' % name, file=fo) - print('require "%s"\n' % name, file=fo) + print(rf'print("require \"{name}\"\n");', file=fo) + print(f'require "{name}"\n', file=fo) return tf if (tf_exists or imports) else False diff --git a/conda_build/develop.py b/conda_build/develop.py index 59b31a3231..d0e3d59fd6 100644 --- a/conda_build/develop.py +++ b/conda_build/develop.py @@ -137,12 +137,11 @@ def execute( ) -> None: if not isdir(prefix): sys.exit( - """\ -Error: environment does not exist: %s + f"""\ 
+Error: environment does not exist: {prefix} # # Use 'conda create' to create the environment first. #""" - % prefix ) assert find_executable("python", prefix=prefix) diff --git a/conda_build/environ.py b/conda_build/environ.py index 5aae94e682..7a3a7ca8cb 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -536,8 +536,7 @@ def meta_vars(meta: MetaData, skip_build_id=False): value = os.getenv(var_name) if value is None: warnings.warn( - "The environment variable '%s' specified in script_env is undefined." - % var_name, + f"The environment variable '{var_name}' specified in script_env is undefined.", UserWarning, ) else: @@ -855,7 +854,7 @@ def get_install_actions( capture = utils.capture for feature, value in feature_list: if value: - specs.append("%s@" % feature) + specs.append(f"{feature}@") bldpkgs_dirs = ensure_list(bldpkgs_dirs) @@ -961,7 +960,7 @@ def get_install_actions( # specs are the raw specifications, not the conda-derived actual specs # We're testing that pip etc. are manually specified if not any( - re.match(r"^%s(?:$|[\s=].*)" % pkg, str(dep)) for dep in specs + re.match(rf"^{pkg}(?:$|[\s=].*)", str(dep)) for dep in specs ): precs = [prec for prec in precs if prec.name != pkg] cached_precs[(specs, env, subdir, channel_urls, disable_pip)] = precs.copy() @@ -1341,7 +1340,7 @@ def _display_actions(prefix, precs): builder = ["", "## Package Plan ##\n"] if prefix: - builder.append(" environment location: %s" % prefix) + builder.append(f" environment location: {prefix}") builder.append("") print("\n".join(builder)) @@ -1385,9 +1384,9 @@ def channel_filt(s): # string with new-style string formatting. fmt[pkg] = f"{{pkg:<{maxpkg}}} {{vers:<{maxver}}}" if maxchannels: - fmt[pkg] += " {channel:<%s}" % maxchannels + fmt[pkg] += f" {{channel:<{maxchannels}}}" if features[pkg]: - fmt[pkg] += " [{features:<%s}]" % maxfeatures + fmt[pkg] += f" [{{features:<{maxfeatures}}}]" lead = " " * 4 diff --git a/conda_build/exceptions.py b/conda_build/exceptions.py index f38706786a..9744ca14b4 100644 --- a/conda_build/exceptions.py +++ b/conda_build/exceptions.py @@ -110,19 +110,19 @@ class BuildLockError(CondaBuildException): class OverLinkingError(RuntimeError): def __init__(self, error, *args): self.error = error - self.msg = "overlinking check failed \n%s" % (error) + self.msg = f"overlinking check failed \n{error}" super().__init__(self.msg) class OverDependingError(RuntimeError): def __init__(self, error, *args): self.error = error - self.msg = "overdepending check failed \n%s" % (error) + self.msg = f"overdepending check failed \n{error}" super().__init__(self.msg) class RunPathError(RuntimeError): def __init__(self, error, *args): self.error = error - self.msg = "runpaths check failed \n%s" % (error) + self.msg = f"runpaths check failed \n{error}" super().__init__(self.msg) diff --git a/conda_build/inspect_pkg.py b/conda_build/inspect_pkg.py index 19c0db7ca3..43fc401551 100644 --- a/conda_build/inspect_pkg.py +++ b/conda_build/inspect_pkg.py @@ -132,7 +132,7 @@ def print_linkages( else sort_order.get(key[0], (4, key[0])) ), ): - output_string += "%s:\n" % prec + output_string += f"{prec}:\n" if show_files: for lib, path, binary in sorted(links): output_string += f" {lib} ({path}) from {binary}\n" @@ -296,7 +296,7 @@ def inspect_linkages( output_string += print_linkages(inverted_map[dep], show_files=show_files) else: - raise ValueError("Unrecognized groupby: %s" % groupby) + raise ValueError(f"Unrecognized groupby: {groupby}") if hasattr(output_string, "decode"): output_string 
= output_string.decode("utf-8") return output_string diff --git a/conda_build/license_family.py b/conda_build/license_family.py index 976cc1b33a..ab101274ae 100644 --- a/conda_build/license_family.py +++ b/conda_build/license_family.py @@ -29,7 +29,7 @@ gpl3_regex = re.compile("GPL[^2]*3") # match GPL3 gpl23_regex = re.compile("GPL[^2]*>= *2") # match GPL >= 2 cc_regex = re.compile(r"CC\w+") # match CC -punk_regex = re.compile("[%s]" % re.escape(string.punctuation)) # removes punks +punk_regex = re.compile(f"[{re.escape(string.punctuation)}]") # removes punks def match_gpl3(family): diff --git a/conda_build/metadata.py b/conda_build/metadata.py index 6fd065e0b2..2552682840 100644 --- a/conda_build/metadata.py +++ b/conda_build/metadata.py @@ -397,7 +397,7 @@ def ensure_valid_noarch_value(meta): build_noarch = meta.get("build", {}).get("noarch") if build_noarch and build_noarch not in NOARCH_TYPES: raise exceptions.CondaBuildException( - "Invalid value for noarch: %s" % build_noarch + f"Invalid value for noarch: {build_noarch}" ) @@ -828,7 +828,7 @@ def _get_env_path(env_name_or_path): break bootstrap_metadir = os.path.join(env_name_or_path, "conda-meta") if not os.path.isdir(bootstrap_metadir): - print("Bootstrap environment '%s' not found" % env_name_or_path) + print(f"Bootstrap environment '{env_name_or_path}' not found") sys.exit(1) return env_name_or_path @@ -1478,7 +1478,7 @@ def check_field(key, section): if section == "extra": continue if section not in FIELDS: - raise ValueError("unknown section: %s" % section) + raise ValueError(f"unknown section: {section}") for key_or_dict in submeta: if section in OPTIONALLY_ITERABLE_FIELDS and isinstance( key_or_dict, dict @@ -1492,17 +1492,17 @@ def check_field(key, section): def name(self) -> str: name = self.get_value("package/name", "") if not name and self.final: - sys.exit("Error: package/name missing in: %r" % self.meta_path) + sys.exit(f"Error: package/name missing in: {self.meta_path!r}") name = str(name) if name != name.lower(): - sys.exit("Error: package/name must be lowercase, got: %r" % name) + sys.exit(f"Error: package/name must be lowercase, got: {name!r}") check_bad_chrs(name, "package/name") return name def version(self) -> str: version = self.get_value("package/version", "") if not version and not self.get_section("outputs") and self.final: - sys.exit("Error: package/version missing in: %r" % self.meta_path) + sys.exit(f"Error: package/version missing in: {self.meta_path!r}") version = str(version) check_bad_chrs(version, "package/version") if self.final and version.startswith("."): @@ -1571,7 +1571,7 @@ def ms_depends(self, typ="run"): try: ms = MatchSpec(spec) except AssertionError: - raise RuntimeError("Invalid package specification: %r" % spec) + raise RuntimeError(f"Invalid package specification: {spec!r}") except (AttributeError, ValueError) as e: raise RuntimeError( "Received dictionary as spec. 
Note that pip requirements are " @@ -1580,7 +1580,7 @@ def ms_depends(self, typ="run"): if ms.name == self.name() and not ( typ == "build" and self.config.host_subdir != self.config.build_subdir ): - raise RuntimeError("%s cannot depend on itself" % self.name()) + raise RuntimeError(f"{self.name()} cannot depend on itself") for name, ver in name_ver_list: if ms.name == name: if self.noarch: @@ -1708,7 +1708,7 @@ def build_id(self): out = build_string_from_metadata(self) if self.config.filename_hashing and self.final: hash_ = self.hash_dependencies() - if not re.findall("h[0-9a-f]{%s}" % self.config.hash_length, out): + if not re.findall(f"h[0-9a-f]{{{self.config.hash_length}}}", out): ret = out.rsplit("_", 1) try: int(ret[0]) @@ -1718,14 +1718,14 @@ def build_id(self): if len(ret) > 1: out = "_".join([out] + ret[1:]) else: - out = re.sub("h[0-9a-f]{%s}" % self.config.hash_length, hash_, out) + out = re.sub(f"h[0-9a-f]{{{self.config.hash_length}}}", hash_, out) return out def dist(self): return f"{self.name()}-{self.version()}-{self.build_id()}" def pkg_fn(self): - return "%s.tar.bz2" % self.dist() + return f"{self.dist()}.tar.bz2" def is_app(self): return bool(self.get_value("app/entry")) @@ -1733,8 +1733,8 @@ def is_app(self): def app_meta(self): d = {"type": "app"} if self.get_value("app/icon"): - d["icon"] = "%s.png" % compute_sum( - join(self.path, self.get_value("app/icon")), "md5" + d["icon"] = "{}.png".format( + compute_sum(join(self.path, self.get_value("app/icon")), "md5") ) for field, key in [ @@ -2319,7 +2319,7 @@ def variant_in_source(self): # constrain the stored variants to only this version in the output # variant mapping if re.search( - r"\s*\{\{\s*%s\s*(?:.*?)?\}\}" % key, self.extract_source_text() + rf"\s*\{{\{{\s*{key}\s*(?:.*?)?\}}\}}", self.extract_source_text() ): return True return False diff --git a/conda_build/noarch_python.py b/conda_build/noarch_python.py index fb81565b3d..1e80fcd2e4 100644 --- a/conda_build/noarch_python.py +++ b/conda_build/noarch_python.py @@ -26,7 +26,7 @@ def rewrite_script(fn, prefix): try: data = fi.read() except UnicodeDecodeError: # file is binary - sys.exit("[noarch_python] Noarch package contains binary script: %s" % fn) + sys.exit(f"[noarch_python] Noarch package contains binary script: {fn}") src_mode = os.stat(src).st_mode os.unlink(src) @@ -83,7 +83,7 @@ def handle_file(f, d, prefix): else: # this should be the built-in logging module, not conda-build's stuff, because this file is standalone. log = logging.getLogger(__name__) - log.debug("Don't know how to handle file: %s. Including it as-is." % f) + log.debug(f"Don't know how to handle file: {f}. 
Including it as-is.") def populate_files(m, files, prefix, entry_point_scripts=None): @@ -119,7 +119,7 @@ def transform(m, files, prefix): # Create *nix prelink script # Note: it's important to use LF newlines or it wont work if we build on Win - with open(join(bin_dir, ".%s-pre-link.sh" % name), "wb") as fo: + with open(join(bin_dir, f".{name}-pre-link.sh"), "wb") as fo: fo.write( b"""\ #!/bin/bash @@ -128,7 +128,7 @@ def transform(m, files, prefix): ) # Create windows prelink script (be nice and use Windows newlines) - with open(join(scripts_dir, ".%s-pre-link.bat" % name), "wb") as fo: + with open(join(scripts_dir, f".{name}-pre-link.bat"), "wb") as fo: fo.write( """\ @echo off diff --git a/conda_build/os_utils/ldd.py b/conda_build/os_utils/ldd.py index b2de763074..84e80b8e90 100644 --- a/conda_build/os_utils/ldd.py +++ b/conda_build/os_utils/ldd.py @@ -44,7 +44,7 @@ def ldd(path): continue if "ld-linux" in line: continue - raise RuntimeError("Unexpected output from ldd: %s" % line) + raise RuntimeError(f"Unexpected output from ldd: {line}") return res diff --git a/conda_build/os_utils/liefldd.py b/conda_build/os_utils/liefldd.py index d02cd2bd30..d6ee2841d6 100644 --- a/conda_build/os_utils/liefldd.py +++ b/conda_build/os_utils/liefldd.py @@ -1125,9 +1125,9 @@ def get_symbols(file, defined=True, undefined=True, notexported=False, arch="nat ) if binary.__class__ != lief.MachO.Binary: if isinstance(s, str): - s_name = "%s" % s + s_name = f"{s}" else: - s_name = "%s" % s.name + s_name = f"{s.name}" if s.exported and s.imported: print(f"Weird, symbol {s.name} is both imported and exported") if s.exported: @@ -1136,16 +1136,16 @@ def get_symbols(file, defined=True, undefined=True, notexported=False, arch="nat elif s.imported: is_undefined = False else: - s_name = "%s" % s.name + s_name = f"{s.name}" is_notexported = False if s.type & 1 else True # print("{:32s} : s.type 0b{:020b}, s.value 0b{:020b}".format(s.name, s.type, s.value)) # print("s.value 0b{:020b} :: s.type 0b{:020b}, {:32s}".format(s.value, s.type, s.name)) if notexported is True or is_notexported is False: if is_undefined and undefined: - res.append("%s" % s_name) + res.append(f"{s_name}") elif not is_undefined and defined: - res.append("%s" % s_name) + res.append(f"{s_name}") return res diff --git a/conda_build/os_utils/macho.py b/conda_build/os_utils/macho.py index 516df7a0a6..17fc5d5a13 100644 --- a/conda_build/os_utils/macho.py +++ b/conda_build/os_utils/macho.py @@ -286,7 +286,7 @@ def add_rpath(path, rpath, build_prefix=None, verbose=False): args = ["-add_rpath", rpath, path] code, _, stderr = install_name_tool(args, build_prefix) if "Mach-O dynamic shared library stub file" in stderr: - print("Skipping Mach-O dynamic shared library stub file %s\n" % path) + print(f"Skipping Mach-O dynamic shared library stub file {path}\n") return elif "would duplicate path, file already has LC_RPATH for:" in stderr: print("Skipping -add_rpath, file already has LC_RPATH set") @@ -304,7 +304,7 @@ def delete_rpath(path, rpath, build_prefix=None, verbose=False): args = ["-delete_rpath", rpath, path] code, _, stderr = install_name_tool(args, build_prefix) if "Mach-O dynamic shared library stub file" in stderr: - print("Skipping Mach-O dynamic shared library stub file %s\n" % path) + print(f"Skipping Mach-O dynamic shared library stub file {path}\n") return elif "no LC_RPATH load command with path:" in stderr: print("Skipping -delete_rpath, file doesn't contain that LC_RPATH") @@ -341,7 +341,7 @@ def install_name_change(path, build_prefix, 
cb_func, dylibs, verbose=False): args.extend(("-change", dylibs[index]["name"], new_name, path)) code, _, stderr = install_name_tool(args, build_prefix) if "Mach-O dynamic shared library stub file" in stderr: - print("Skipping Mach-O dynamic shared library stub file %s" % path) + print(f"Skipping Mach-O dynamic shared library stub file {path}") ret = False continue else: diff --git a/conda_build/post.py b/conda_build/post.py index 30a4057a30..67c6a355a7 100644 --- a/conda_build/post.py +++ b/conda_build/post.py @@ -150,11 +150,11 @@ def write_pth(egg_path, config): with open( join( utils.get_site_packages(config.host_prefix, py_ver), - "%s.pth" % (fn.split("-")[0]), + "{}.pth".format(fn.split("-")[0]), ), "w", ) as fo: - fo.write("./%s\n" % fn) + fo.write(f"./{fn}\n") def remove_easy_install_pth(files, prefix, config, preserve_egg_dir=False): @@ -368,7 +368,7 @@ def find_lib(link, prefix, files, path=None): if link.startswith(prefix): link = normpath(link[len(prefix) + 1 :]) if not any(link == normpath(w) for w in files): - sys.exit("Error: Could not find %s" % link) + sys.exit(f"Error: Could not find {link}") return link if link.startswith("/"): # but doesn't start with the build prefix return @@ -382,7 +382,7 @@ def find_lib(link, prefix, files, path=None): for f in files: file_names[basename(f)].append(f) if link not in file_names: - sys.exit("Error: Could not find %s" % link) + sys.exit(f"Error: Could not find {link}") if len(file_names[link]) > 1: if path and basename(path) == link: # The link is for the file itself, just use it @@ -403,7 +403,7 @@ def find_lib(link, prefix, files, path=None): "Choosing the first one." ) return file_names[link][0] - print("Don't know how to find %s, skipping" % link) + print(f"Don't know how to find {link}, skipping") def osx_ch_link(path, link_dict, host_prefix, build_prefix, files): @@ -417,8 +417,7 @@ def osx_ch_link(path, link_dict, host_prefix, build_prefix, files): ) if not codefile_class(link, skip_symlinks=True): sys.exit( - "Error: Compiler runtime library in build prefix not found in host prefix %s" - % link + f"Error: Compiler runtime library in build prefix not found in host prefix {link}" ) else: print(f".. 
fixing linking of {link} in {path} instead") @@ -429,7 +428,7 @@ def osx_ch_link(path, link_dict, host_prefix, build_prefix, files): return print(f"Fixing linking of {link} in {path}") - print("New link location is %s" % (link_loc)) + print(f"New link location is {link_loc}") lib_to_link = relpath(dirname(link_loc), "lib") # path_to_lib = utils.relative(path[len(prefix) + 1:]) @@ -647,7 +646,7 @@ def assert_relative_osx(path, host_prefix, build_prefix): for prefix in (host_prefix, build_prefix): if prefix and name.startswith(prefix): raise RuntimeError( - "library at %s appears to have an absolute path embedded" % path + f"library at {path} appears to have an absolute path embedded" ) @@ -1770,7 +1769,7 @@ def check_symlinks(files, prefix, croot): if msgs: for msg in msgs: - print("Error: %s" % msg, file=sys.stderr) + print(f"Error: {msg}", file=sys.stderr) sys.exit(1) diff --git a/conda_build/render.py b/conda_build/render.py index b021f8a5b6..cc3bcd87c0 100644 --- a/conda_build/render.py +++ b/conda_build/render.py @@ -115,7 +115,7 @@ def _categorize_deps(m, specs, exclude_pattern, variant): # for sake of comparison, ignore dashes and underscores if dash_or_under.sub("", key) == dash_or_under.sub( "", spec_name - ) and not re.search(r"%s\s+[0-9a-zA-Z\_\.\<\>\=\*]" % spec_name, spec): + ) and not re.search(rf"{spec_name}\s+[0-9a-zA-Z\_\.\<\>\=\*]", spec): dependencies.append(" ".join((spec_name, value))) elif exclude_pattern.match(spec): pass_through_deps.append(spec) diff --git a/conda_build/skeletons/cpan.py b/conda_build/skeletons/cpan.py index c9bd5c398c..31213054d1 100644 --- a/conda_build/skeletons/cpan.py +++ b/conda_build/skeletons/cpan.py @@ -511,9 +511,7 @@ def skeletonize( # packages, unless we're newer than what's in core if metacpan_api_is_core_version(meta_cpan_url, package): if not write_core: - print( - "We found core module %s. Skipping recipe creation." % packagename - ) + print(f"We found core module {packagename}. Skipping recipe creation.") continue d["useurl"] = "#" @@ -577,12 +575,11 @@ def skeletonize( version = None if exists(dir_path) and not force: print( - "Directory %s already exists and you have not specified --force " - % dir_path + f"Directory {dir_path} already exists and you have not specified --force " ) continue elif exists(dir_path) and force: - print("Directory %s already exists, but forcing recipe creation" % dir_path) + print(f"Directory {dir_path} already exists, but forcing recipe creation") try: d["homeurl"] = release_data["resources"]["homepage"] @@ -756,7 +753,7 @@ def deps_for_package( } packages_to_append = set() - print("Processing dependencies for %s..." % package, end="") + print(f"Processing dependencies for {package}...", end="") sys.stdout.flush() if not release_data.get("dependency"): @@ -1052,11 +1049,8 @@ def metacpan_api_is_core_version(cpan_url, module): return True else: sys.exit( - ( - "Error: Could not find module or distribution named" - " %s on MetaCPAN." - ) - % (module) + "Error: Could not find module or distribution named" + f" {module} on MetaCPAN." 
) diff --git a/conda_build/skeletons/cran.py b/conda_build/skeletons/cran.py index 38628a52f4..93958333fb 100755 --- a/conda_build/skeletons/cran.py +++ b/conda_build/skeletons/cran.py @@ -489,7 +489,7 @@ def dict_from_cran_lines(lines): # - Suggests in corpcor (k, v) = line.split(":", 1) except ValueError: - sys.exit("Error: Could not parse metadata (%s)" % line) + sys.exit(f"Error: Could not parse metadata ({line})") d[k] = v # if k not in CRAN_KEYS: # print("Warning: Unknown key %s" % k) @@ -597,7 +597,7 @@ def read_description_contents(fp): def get_archive_metadata(path, verbose=True): if verbose: - print("Reading package metadata from %s" % path) + print(f"Reading package metadata from {path}") if basename(path) == "DESCRIPTION": with open(path, "rb") as fp: return read_description_contents(fp) @@ -614,8 +614,8 @@ fp = zf.open(member, "r") return read_description_contents(fp) else: - sys.exit("Cannot extract a DESCRIPTION from file %s" % path) - sys.exit("%s does not seem to be a CRAN package (no DESCRIPTION) file" % path) + sys.exit(f"Cannot extract a DESCRIPTION from file {path}") + sys.exit(f"{path} does not seem to be a CRAN package (no DESCRIPTION file)") def get_latest_git_tag(config): @@ -638,12 +638,12 @@ stdout = stdout.decode("utf-8") stderr = stderr.decode("utf-8") if stderr or p.returncode: - sys.exit("Error: git tag failed (%s)" % stderr) + sys.exit(f"Error: git tag failed ({stderr})") tags = stdout.strip().splitlines() if not tags: sys.exit("Error: no tags found") - print("Using tag %s" % tags[-1]) + print(f"Using tag {tags[-1]}") return tags[-1] @@ -683,7 +683,7 @@ def get_cran_archive_versions(cran_url, session, package, verbose=True): r.raise_for_status() except requests.exceptions.HTTPError as e: if e.response.status_code == 404: - print("No archive directory for package %s" % package) + print(f"No archive directory for package {package}") return [] raise versions = [] @@ -698,7 +698,7 @@ def get_cran_index(cran_url, session, verbose=True): if verbose: - print("Fetching main index from %s" % cran_url) + print(f"Fetching main index from {cran_url}") r = session.get(cran_url + "/src/contrib/") r.raise_for_status() records = {} @@ -775,7 +775,7 @@ def package_to_inputs_dict( """ if isfile(package): return None - print("Parsing input package %s:" % package) + print(f"Parsing input package {package}:") package = strip_end(package, "/") package = strip_end(package, sep) if "github.com" in package: @@ -1037,7 +1037,7 @@ def skeletonize( session = get_session(output_dir) cran_index = get_cran_index(cran_url, session) if pkg_name.lower() not in cran_index: - sys.exit("Package %s not found" % pkg_name) + sys.exit(f"Package {pkg_name} not found") package, cran_version = cran_index[pkg_name.lower()] if cran_version and (not version or version == cran_version): version = cran_version @@ -1048,8 +1048,7 @@ sys.exit(1) elif not version and not cran_version and not allow_archived: print( - "ERROR: Package %s is archived; to build, use --allow-archived or a --version value" - % pkg_name + f"ERROR: Package {pkg_name} is archived; to build, use --allow-archived or a --version value" ) sys.exit(1) else: @@ -1325,7 +1324,7 @@ if cran_package is None: cran_package = get_archive_metadata(description_path) d["cran_metadata"] = "\n".join( - ["# %s" % line for line in cran_package["orig_lines"] if line] + [f"# 
{line}" for line in cran_package["orig_lines"] if line] ) # Render the source and binaryN keys @@ -1377,7 +1376,7 @@ def skeletonize( d["summary"] = " " + yaml_quote_string(cran_package["Description"]) if "Suggests" in cran_package and not no_comments: - d["suggests"] = "# Suggests: %s" % cran_package["Suggests"] + d["suggests"] = "# Suggests: {}".format(cran_package["Suggests"]) else: d["suggests"] = "" @@ -1589,7 +1588,7 @@ def skeletonize( ) package_list.append(lower_name) - d["%s_depends" % dep_type] = "".join(deps) + d[f"{dep_type}_depends"] = "".join(deps) if no_comments: global CRAN_BUILD_SH_SOURCE, CRAN_META @@ -1603,7 +1602,7 @@ def skeletonize( if update_policy == "error": raise RuntimeError( "directory already exists " - "(and --update-policy is 'error'): %s" % dir_path + f"(and --update-policy is 'error'): {dir_path}" ) elif update_policy == "overwrite": rm_rf(dir_path) @@ -1626,7 +1625,7 @@ def skeletonize( makedirs(join(dir_path)) except: pass - print("Writing recipe for %s" % package.lower()) + print(f"Writing recipe for {package.lower()}") with open(join(dir_path, "meta.yaml"), "w") as f: f.write(clear_whitespace(CRAN_META.format(**d))) if not exists(join(dir_path, "build.sh")) or update_policy == "overwrite": @@ -1683,14 +1682,14 @@ def get_outdated(output_dir, cran_index, packages=()): continue if recipe_name not in cran_index: - print("Skipping %s, not found on CRAN" % recipe) + print(f"Skipping {recipe}, not found on CRAN") continue version_compare( join(output_dir, recipe), cran_index[recipe_name][1].replace("-", "_") ) - print("Updating %s" % recipe) + print(f"Updating {recipe}") to_update.append(recipe_name) return to_update diff --git a/conda_build/skeletons/luarocks.py b/conda_build/skeletons/luarocks.py index da8e641928..41ec499bad 100644 --- a/conda_build/skeletons/luarocks.py +++ b/conda_build/skeletons/luarocks.py @@ -174,7 +174,7 @@ def package_exists(package_name): def getval(spec, k): if k not in spec: - raise Exception("Required key %s not in spec" % k) + raise Exception(f"Required key {k} not in spec") else: return spec[k] @@ -184,7 +184,7 @@ def warn_against_branches(branch): print("=========================================") print("") print("WARNING:") - print("Building a rock referenced to branch %s." % branch) + print(f"Building a rock referenced to branch {branch}.") print("This is not a tag. 
This is dangerous, because rebuilding") print("at a later date may produce a different package.") print("Please replace with a tag, git commit, or tarball.") @@ -253,7 +253,7 @@ def skeletonize( package = packages.pop() packagename = ( - "lua-%s" % package.lower() if package[:4] != "lua-" else package.lower() + f"lua-{package.lower()}" if package[:4] != "lua-" else package.lower() ) d = package_dicts.setdefault( package, @@ -372,13 +372,13 @@ def skeletonize( modules = spec["build"]["platforms"][our_plat]["modules"] if modules: d["test_commands"] = INDENT.join( - [""] + ["""lua -e "require '%s'\"""" % r for r in modules.keys()] + [""] + [f"""lua -e "require '{r}'\"""" for r in modules.keys()] ) # If we didn't find any modules to import, import the base name if d["test_commands"] == "": d["test_commands"] = INDENT.join( - [""] + ["""lua -e "require '%s'" """ % d["rockname"]] + [""] + ["""lua -e "require '{}'" """.format(d["rockname"])] ) # Build the luarocks skeleton diff --git a/conda_build/skeletons/pypi.py b/conda_build/skeletons/pypi.py index c45c843a6d..d3b716bc8b 100644 --- a/conda_build/skeletons/pypi.py +++ b/conda_build/skeletons/pypi.py @@ -300,7 +300,7 @@ def skeletonize( if not is_url: dir_path = join(output_dir, package.lower()) if exists(dir_path) and not version_compare: - raise RuntimeError("directory already exists: %s" % dir_path) + raise RuntimeError(f"directory already exists: {dir_path}") d = package_dicts.setdefault( package, { @@ -343,14 +343,12 @@ def skeletonize( else: # select the most visible version from PyPI. if not versions: - sys.exit( - "Error: Could not find any versions of package %s" % package - ) + sys.exit(f"Error: Could not find any versions of package {package}") if len(versions) > 1: - print("Warning, the following versions were found for %s" % package) + print(f"Warning, the following versions were found for {package}") for ver in versions: print(ver) - print("Using %s" % versions[-1]) + print(f"Using {versions[-1]}") print("Use --version to specify a different version.") d["version"] = versions[-1] @@ -404,7 +402,7 @@ def skeletonize( d = package_dicts[package] name = d["packagename"].lower() makedirs(join(output_dir, name)) - print("Writing recipe for %s" % package.lower()) + print(f"Writing recipe for {package.lower()}") with open(join(output_dir, name, "meta.yaml"), "w") as f: rendered_recipe = PYPI_META_HEADER.format(**d) @@ -642,8 +640,8 @@ def get_download_data( if not urls[0]["url"]: # The package doesn't have a url, or maybe it only has a wheel. sys.exit( - "Error: Could not build recipe for %s. " - "Could not find any valid urls." % package + f"Error: Could not build recipe for {package}. " + "Could not find any valid urls." ) U = parse_url(urls[0]["url"]) if not U.path: @@ -652,9 +650,9 @@ def get_download_data( fragment = U.fragment or "" digest = fragment.split("=") else: - sys.exit("Error: No source urls found for %s" % package) + sys.exit(f"Error: No source urls found for {package}") if len(urls) > 1 and not noprompt: - print("More than one source version is available for %s:" % package) + print(f"More than one source version is available for {package}:") if manual_url: for i, url in enumerate(urls): print( @@ -689,7 +687,7 @@ def get_download_data( filename = url["filename"] or "package" else: # User provided a URL, try to use it. 
- print("Using url %s" % package) + print(f"Using url {package}") pypiurl = package U = parse_url(package) digest = U.fragment.split("=") @@ -711,7 +709,7 @@ def version_compare(package, versions): recipe_dir = abspath(package.lower()) if not isdir(recipe_dir): - sys.exit("Error: no such directory: %s" % recipe_dir) + sys.exit(f"Error: no such directory: {recipe_dir}") m = MetaData(recipe_dir) local_version = nv(m.version()) print(f"Local recipe for {package} has version {local_version}") @@ -721,11 +719,11 @@ def version_compare(package, versions): # Comparing normalized versions, displaying non normalized ones new_versions = versions[: norm_versions.index(local_version)] if len(new_versions) > 0: - print("Following new versions of %s are avaliable" % (package)) + print(f"Following new versions of {package} are avaliable") for ver in new_versions: print(ver) else: - print("No new version for %s is available" % (package)) + print(f"No new version for {package} is available") sys.exit() @@ -828,7 +826,7 @@ def get_package_metadata( config, setup_options, ): - print("Downloading %s" % package) + print(f"Downloading {package}") print("PyPI URL: ", metadata["pypiurl"]) pkginfo = get_pkginfo( package, @@ -982,7 +980,7 @@ def _spec_from_line(line): ) spec = _spec_from_line(dep_orig) if spec is None: - sys.exit("Error: Could not parse: %s" % dep) + sys.exit(f"Error: Could not parse: {dep}") if marker: spec = " ".join((spec, marker)) @@ -1058,10 +1056,10 @@ def get_license_name(package, pkginfo, no_prompt=False, data=None): if no_prompt: return license_name elif "\n" not in license_name: - print('Using "%s" for the license' % license_name) + print(f'Using "{license_name}" for the license') else: # Some projects put the whole license text in this field - print("This is the license for %s" % package) + print(f"This is the license for {package}") print() print(license_name) print() @@ -1070,8 +1068,8 @@ def get_license_name(package, pkginfo, no_prompt=False, data=None): license_name = "UNKNOWN" else: license_name = input( - "No license could be found for %s on PyPI or in the source. " - "What license should I use? " % package + f"No license could be found for {package} on PyPI or in the source. " + "What license should I use? " ) return license_name @@ -1175,7 +1173,7 @@ def unpack(src_path, tempdir): if src_path.lower().endswith(decompressible_exts): tar_xf(src_path, tempdir) else: - raise Exception("not a valid source: %s" % src_path) + raise Exception(f"not a valid source: {src_path}") def get_dir(tempdir): @@ -1209,7 +1207,7 @@ def get_requirements(package, pkginfo, all_extras=True): try: extras_require = [pkginfo["extras_require"][x] for x in extras] except KeyError: - sys.exit("Error: Invalid extra features: [%s]" % ",".join(extras)) + sys.exit("Error: Invalid extra features: [{}]".format(",".join(extras))) # match PEP 508 environment markers; currently only matches the # subset of environment markers that compare to python_version # using a single basic Python comparison operator @@ -1297,10 +1295,10 @@ def get_pkginfo( else: new_hash_value = "" - print("Unpacking %s..." 
% package) + print(f"Unpacking {package}...") unpack(join(config.src_cache, filename), tempdir) print("done") - print("working in %s" % tempdir) + print(f"working in {tempdir}") src_dir = get_dir(tempdir) # TODO: find args parameters needed by run_setuppy run_setuppy( @@ -1366,7 +1364,7 @@ def run_setuppy(src_dir, temp_dir, python_version, extra_specs, config, setup_op ) stdlib_dir = join( config.host_prefix, - "Lib" if on_win else "lib/python%s" % python_version, + "Lib" if on_win else f"lib/python{python_version}", ) patch = join(temp_dir, "pypi-distutils.patch") @@ -1421,8 +1419,8 @@ def run_setuppy(src_dir, temp_dir, python_version, extra_specs, config, setup_op try: check_call_env(cmdargs, env=env) except subprocess.CalledProcessError: - print("$PYTHONPATH = %s" % env["PYTHONPATH"]) - sys.exit("Error: command failed: %s" % " ".join(cmdargs)) + print("$PYTHONPATH = {}".format(env["PYTHONPATH"])) + sys.exit("Error: command failed: {}".format(" ".join(cmdargs))) finally: chdir(cwd) diff --git a/conda_build/source.py b/conda_build/source.py index c7b3d1921b..903f5d7ca0 100644 --- a/conda_build/source.py +++ b/conda_build/source.py @@ -55,7 +55,7 @@ def append_hash_to_fn(fn, hash_value): def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False): """Download a source to the local cache.""" if verbose: - log.info("Source cache directory is: %s" % cache_folder) + log.info(f"Source cache directory is: {cache_folder}") if not isdir(cache_folder) and not os.path.islink(cache_folder): os.makedirs(cache_folder) @@ -81,10 +81,10 @@ def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False): path = join(cache_folder, fn) if isfile(path): if verbose: - log.info("Found source in cache: %s" % fn) + log.info(f"Found source in cache: {fn}") else: if verbose: - log.info("Downloading source to cache: %s" % fn) + log.info(f"Downloading source to cache: {fn}") for url in source_urls: if "://" not in url: @@ -98,14 +98,14 @@ def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False): url = "file:///" + expanduser(url[8:]).replace("\\", "/") try: if verbose: - log.info("Downloading %s" % url) + log.info(f"Downloading {url}") with LoggingContext(): download(url, path) except CondaHTTPError as e: - log.warn("Error: %s" % str(e).strip()) + log.warn(f"Error: {str(e).strip()}") rm_rf(path) except RuntimeError as e: - log.warn("Error: %s" % str(e).strip()) + log.warn(f"Error: {str(e).strip()}") rm_rf(path) else: if verbose: @@ -113,7 +113,7 @@ def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False): break else: # no break rm_rf(path) - raise RuntimeError("Could not download %s" % url) + raise RuntimeError(f"Could not download {url}") hashed = None for tp in ("md5", "sha1", "sha256"): @@ -344,7 +344,7 @@ def git_mirror_checkout_recursive( ) checkout = output.decode("utf-8") if verbose: - print("checkout: %r" % checkout) + print(f"checkout: {checkout!r}") if checkout: check_call_env( [git, "checkout", checkout], @@ -492,7 +492,7 @@ def git_info(src_dir, build_prefix, git=None, verbose=True, fo=None): stdout = check_output_env(cmd, stderr=stderr, cwd=src_dir, env=env) except CalledProcessError as e: if check_error: - raise Exception("git error: %s" % str(e)) + raise Exception(f"git error: {str(e)}") encoding = locale.getpreferredencoding() if not fo: encoding = sys.stdout.encoding @@ -535,7 +535,7 @@ def hg_source(source_dict, src_dir, hg_cache, verbose): # now clone in to work directory update = source_dict.get("hg_tag") or "tip" if verbose: - 
print("checkout: %r" % update) + print(f"checkout: {update!r}") check_call_env(["hg", "clone", cache_repo, src_dir], stdout=stdout, stderr=stderr) check_call_env( @@ -953,7 +953,7 @@ def try_apply_patch(patch, patch_args, cwd, stdout, stderr): exception = None if not isfile(path): - raise RuntimeError("Error: no such patch: %s" % path) + raise RuntimeError(f"Error: no such patch: {path}") if config.verbose: stdout = None diff --git a/conda_build/tarcheck.py b/conda_build/tarcheck.py index 3a98559187..374422f1e1 100644 --- a/conda_build/tarcheck.py +++ b/conda_build/tarcheck.py @@ -13,7 +13,7 @@ def dist_fn(fn): elif fn.endswith(".tar.bz2"): return fn[:-8] else: - raise Exception("did not expect filename: %r" % fn) + raise Exception(f"did not expect filename: {fn!r}") class TarCheck: @@ -51,9 +51,9 @@ def info_files(self): return for p in sorted(seta | setb): if p not in seta: - print("%r not in info/files" % p) + print(f"{p!r} not in info/files") if p not in setb: - print("%r not in tarball" % p) + print(f"{p!r} not in tarball") raise Exception("info/files") def index_json(self): diff --git a/conda_build/utils.py b/conda_build/utils.py index 4a3e1f782c..796f849caf 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -425,7 +425,7 @@ def bytes2human(n): if n >= prefix[s]: value = float(n) / prefix[s] return f"{value:.1f}{s}" - return "%sB" % n + return f"{n}B" def seconds2human(s): @@ -458,7 +458,7 @@ def get_recipe_abspath(recipe): tar_xf(recipe_tarfile, os.path.join(recipe_dir, "info")) need_cleanup = True else: - print("Ignoring non-recipe: %s" % recipe) + print(f"Ignoring non-recipe: {recipe}") return (None, None) else: recipe_dir = abspath(os.path.join(os.getcwd(), recipe)) @@ -1054,7 +1054,7 @@ def iter_entry_points(items): for item in items: m = entry_pat.match(item) if m is None: - sys.exit("Error cound not match entry point: %r" % item) + sys.exit(f"Error cound not match entry point: {item!r}") yield m.groups() @@ -1076,7 +1076,7 @@ def create_entry_point(path, module, func, config): os.remove(path) with open(path, "w") as fo: if not config.noarch: - fo.write("#!%s\n" % config.host_python) + fo.write(f"#!{config.host_python}\n") fo.write(pyscript) os.chmod(path, 0o775) @@ -1951,7 +1951,7 @@ def insert_variant_versions(requirements_dict, variant, env): ) reqs = ensure_list(requirements_dict.get(env)) for key, val in variant.items(): - regex = re.compile(r"^(%s)(?:\s*$)" % key.replace("_", "[-_]")) + regex = re.compile(r"^({})(?:\s*$)".format(key.replace("_", "[-_]"))) matches = [regex.match(pkg) for pkg in reqs] if any(matches): for i, x in enumerate(matches): diff --git a/conda_build/variants.py b/conda_build/variants.py index 1e2b1adc0c..447025818c 100644 --- a/conda_build/variants.py +++ b/conda_build/variants.py @@ -745,13 +745,13 @@ def find_used_variables_in_text(variant, recipe_text, selectors_only=False): continue v_regex = re.escape(v) v_req_regex = "[-_]".join(map(re.escape, v.split("_"))) - variant_regex = r"\{\s*(?:pin_[a-z]+\(\s*?['\"])?%s[^'\"]*?\}\}" % v_regex - selector_regex = r"^[^#\[]*?\#?\s\[[^\]]*?(?!\]]" % v_regex + variant_regex = rf"\{{\s*(?:pin_[a-z]+\(\s*?['\"])?{v_regex}[^'\"]*?\}}\}}" + selector_regex = rf"^[^#\[]*?\#?\s\[[^\]]*?(?!\]]" conditional_regex = ( r"(?:^|[^\{])\{%\s*(?:el)?if\s*.*" + v_regex + r"\s*(?:[^%]*?)?%\}" ) # plain req name, no version spec. 
Look for end of line after name, or comment or selector - requirement_regex = r"^\s+\-\s+%s\s*(?:\s[\[#]|$)" % v_req_regex + requirement_regex = rf"^\s+\-\s+{v_req_regex}\s*(?:\s[\[#]|$)" if selectors_only: all_res.insert(0, selector_regex) else: diff --git a/conda_build/windows.py b/conda_build/windows.py index 706b499265..00287c50bf 100644 --- a/conda_build/windows.py +++ b/conda_build/windows.py @@ -56,16 +56,13 @@ def fix_staged_scripts(scripts_dir, config): # If it's a #!python script if not (line.startswith(b"#!") and b"python" in line.lower()): continue - print( - "Adjusting unix-style #! script %s, " - "and adding a .bat file for it" % fn - ) + print(f"Adjusting unix-style #! script {fn}, and adding a .bat file for it") # copy it with a .py extension (skipping that first #! line) with open(join(scripts_dir, fn + "-script.py"), "wb") as fo: fo.write(f.read()) # now create the .exe file copy_into( - join(dirname(__file__), "cli-%s.exe" % config.host_arch), + join(dirname(__file__), f"cli-{config.host_arch}.exe"), join(scripts_dir, fn + ".exe"), ) @@ -338,7 +335,7 @@ def build(m, bld_bat, stats, provision_only=False): rewrite_env = { k: env[k] for k in ["PREFIX", "BUILD_PREFIX", "SRC_DIR"] if k in env } - print("Rewriting env in output: %s" % pprint.pformat(rewrite_env)) + print(f"Rewriting env in output: {pprint.pformat(rewrite_env)}") check_call_env( cmd, cwd=m.config.work_dir, stats=stats, rewrite_stdout_env=rewrite_env ) diff --git a/docs/scrape_help.py b/docs/scrape_help.py index 2f99fbb403..66d5af1e57 100755 --- a/docs/scrape_help.py +++ b/docs/scrape_help.py @@ -112,7 +112,7 @@ def external_commands(): def get_help(command): command_help[command] = conda_command_help(command) - print("Checked for subcommand help for %s" % command) + print(f"Checked for subcommand help for {command}") with ThreadPoolExecutor(len(commands)) as executor: # list() is needed for force exceptions to be raised @@ -164,7 +164,7 @@ def generate_man(command): [ "help2man", "--name", - "conda %s" % command, + f"conda {command}", "--section", "1", "--source", @@ -172,36 +172,34 @@ def generate_man(command): "--version-string", conda_version, "--no-info", - "conda %s" % command, + f"conda {command}", ] ) retries -= 1 if not manpage: - sys.exit("Error: Could not get help for conda %s" % command) + sys.exit(f"Error: Could not get help for conda {command}") replacements = man_replacements() for text in replacements: manpage = manpage.replace(text, replacements[text]) - with open(join(manpath, "conda-%s.1" % command.replace(" ", "-")), "w") as f: + with open(join(manpath, "conda-{}.1".format(command.replace(" ", "-"))), "w") as f: f.write(manpage) - print("Generated manpage for conda %s" % command) + print(f"Generated manpage for conda {command}") def generate_html(command): command_file = command.replace(" ", "-") # Use abspath so that it always has a path separator - man = Popen( - ["man", abspath(join(manpath, "conda-%s.1" % command_file))], stdout=PIPE - ) + man = Popen(["man", abspath(join(manpath, f"conda-{command_file}.1"))], stdout=PIPE) htmlpage = check_output( [ "man2html", "-bare", # Don't use HTML, HEAD, or BODY tags "title", - "conda-%s" % command_file, + f"conda-{command_file}", "-topm", "0", # No top margin "-botm", @@ -210,14 +208,14 @@ def generate_html(command): stdin=man.stdout, ) - with open(join(manpath, "conda-%s.html" % command_file), "wb") as f: + with open(join(manpath, f"conda-{command_file}.html"), "wb") as f: f.write(htmlpage) - print("Generated html for conda %s" % command) + 
print(f"Generated html for conda {command}") def write_rst(command, sep=None): command_file = command.replace(" ", "-") - with open(join(manpath, "conda-%s.html" % command_file)) as f: + with open(join(manpath, f"conda-{command_file}.html")) as f: html = f.read() rp = rstpath @@ -225,13 +223,13 @@ def write_rst(command, sep=None): rp = join(rp, sep) if not isdir(rp): makedirs(rp) - with open(join(rp, "conda-%s.rst" % command_file), "w") as f: + with open(join(rp, f"conda-{command_file}.rst"), "w") as f: f.write(RST_HEADER.format(command=command)) for line in html.splitlines(): f.write(" ") f.write(line) f.write("\n") - print("Generated rst for conda %s" % command) + print(f"Generated rst for conda {command}") def main(): diff --git a/tests/test_api_build_conda_v2.py b/tests/test_api_build_conda_v2.py index 4c0c09b9ac..dc4078e61f 100644 --- a/tests/test_api_build_conda_v2.py +++ b/tests/test_api_build_conda_v2.py @@ -40,4 +40,4 @@ def test_conda_pkg_format( # Verify that test pass ran through api assert "Manual entry point" in out - assert "TEST END: %s" % output_file in out + assert f"TEST END: {output_file}" in out diff --git a/tests/utils.py b/tests/utils.py index b4ed64912b..4d6803f09d 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -91,8 +91,7 @@ def assert_package_consistency(package_path): has_prefix_present = False except tarfile.ReadError: raise RuntimeError( - "Could not extract metadata from %s. " - "File probably corrupt." % package_path + f"Could not extract metadata from {package_path}. File probably corrupt." ) errors = [] member_set = set(member_list) # The tar format allows duplicates in member_list @@ -101,7 +100,7 @@ def assert_package_consistency(package_path): file_set = set(file_list) # Check that there are no duplicates in info/files if len(file_list) != len(file_set): - errors.append("Duplicate files in info/files in %s" % package_path) + errors.append(f"Duplicate files in info/files in {package_path}") # Compare the contents of files and members unlisted_members = member_set.difference(file_set) missing_members = file_set.difference(member_set) @@ -109,14 +108,16 @@ def assert_package_consistency(package_path): missing_files = [m for m in unlisted_members if not m.startswith("info/")] if len(missing_files) > 0: errors.append( - "The following package files are not listed in " - "info/files: %s" % ", ".join(missing_files) + "The following package files are not listed in info/files: {}".format( + ", ".join(missing_files) + ) ) # Find any files missing in the archive if len(missing_members) > 0: errors.append( - "The following files listed in info/files are missing: " - "%s" % ", ".join(missing_members) + "The following files listed in info/files are missing: {}".format( + ", ".join(missing_members) + ) ) # Find any files in has_prefix that are not present in files if has_prefix_present: @@ -129,15 +130,15 @@ def assert_package_consistency(package_path): elif len(parts) == 3: prefix_path_list.append(parts[2]) else: - errors.append("Invalid has_prefix file in package: %s" % package_path) + errors.append(f"Invalid has_prefix file in package: {package_path}") prefix_path_set = set(prefix_path_list) if len(prefix_path_list) != len(prefix_path_set): - errors.append("Duplicate files in info/has_prefix in %s" % package_path) + errors.append(f"Duplicate files in info/has_prefix in {package_path}") prefix_not_in_files = prefix_path_set.difference(file_set) if len(prefix_not_in_files) > 0: errors.append( "The following files listed in info/has_prefix are missing " - "from 
info/files: %s" % ", ".join(prefix_not_in_files) + "from info/files: {}".format(", ".join(prefix_not_in_files)) ) # Assert that no errors are detected