diff --git a/docs/source/conf.py b/docs/source/conf.py index 11c156cf..4e33931a 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -61,12 +61,10 @@ from repo2docker.buildpacks.conda import CondaBuildPack default_python = CondaBuildPack.major_pythons["3"] -rst_prolog = """ +rst_prolog = f""" .. |default_python| replace:: **Python {default_python}** .. |default_python_version| replace:: {default_python} -""".format( - default_python=default_python -) +""" # -- Options for HTML output ------------------------------------------------- diff --git a/repo2docker/__main__.py b/repo2docker/__main__.py index c3e7ef7e..2ca17f17 100644 --- a/repo2docker/__main__.py +++ b/repo2docker/__main__.py @@ -304,8 +304,8 @@ def make_r2d(argv=None): r2d.volumes[os.path.abspath(args.repo)] = "." else: r2d.log.error( - 'Cannot mount "{}" in editable mode ' - "as it is not a directory".format(args.repo), + f'Cannot mount "{args.repo}" in editable mode ' + "as it is not a directory", extra=dict(phase=R2dState.FAILED), ) sys.exit(1) diff --git a/repo2docker/_version.py b/repo2docker/_version.py index aca7b127..017b2661 100644 --- a/repo2docker/_version.py +++ b/repo2docker/_version.py @@ -293,7 +293,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? 
- pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out + pieces["error"] = f"unable to parse git-describe output: '{describe_out}'" return pieces # tag @@ -302,10 +302,9 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) - pieces["error"] = "tag '{}' doesn't start with prefix '{}'".format( - full_tag, - tag_prefix, - ) + pieces[ + "error" + ] = f"tag '{full_tag}' doesn't start with prefix '{tag_prefix}'" return pieces pieces["closest-tag"] = full_tag[len(tag_prefix) :] diff --git a/repo2docker/app.py b/repo2docker/app.py index 718b12b2..5bee81b4 100755 --- a/repo2docker/app.py +++ b/repo2docker/app.py @@ -425,9 +425,7 @@ class Repo2Docker(Application): entry = engines[self.engine] except KeyError: raise ContainerEngineException( - "Container engine '{}' not found. Available engines: {}".format( - self.engine, ",".join(engines.keys()) - ) + f"Container engine '{self.engine}' not found. 
Available engines: {','.join(engines.keys())}" ) engine_class = entry.load() return engine_class(parent=self) @@ -447,16 +445,11 @@ class Repo2Docker(Application): spec = cp.detect(url, ref=ref) if spec is not None: picked_content_provider = cp - self.log.info( - "Picked {cp} content " - "provider.\n".format(cp=cp.__class__.__name__) - ) + self.log.info(f"Picked {cp.__class__.__name__} content provider.\n") break if picked_content_provider is None: - self.log.error( - "No matching content provider found for " "{url}.".format(url=url) - ) + self.log.error(f"No matching content provider found for {url}.") swh_token = self.config.get("swh_token", self.swh_token) if swh_token and isinstance(picked_content_provider, contentproviders.Swhid): @@ -488,8 +481,7 @@ class Repo2Docker(Application): Avoids non-JSON output on errors when using --json-logs """ self.log.error( - "Error during build: %s", - evalue, + f"Error during build: {evalue}", exc_info=(etype, evalue, traceback), extra=dict(phase=R2dState.FAILED), ) @@ -619,11 +611,9 @@ class Repo2Docker(Application): run_cmd = [ "jupyter", "notebook", - "--ip", - "0.0.0.0", - "--port", - container_port, - f"--NotebookApp.custom_display_url=http://{host_name}:{host_port}" + "--ip=0.0.0.0", + f"--port={container_port}", + f"--NotebookApp.custom_display_url=http://{host_name}:{host_port}", "--NotebookApp.default_url=/lab", ] else: @@ -730,7 +720,7 @@ class Repo2Docker(Application): try: docker_client = self.get_engine() except ContainerEngineException as e: - self.log.error("\nContainer engine initialization error: %s\n", e) + self.log.error(f"\nContainer engine initialization error: {e}\n") self.exit(1) # If the source to be executed is a directory, continue using the @@ -751,8 +741,7 @@ class Repo2Docker(Application): if self.find_image(): self.log.info( - "Reusing existing image ({}), not " - "building.".format(self.output_image_spec) + f"Reusing existing image ({self.output_image_spec}), not building." 
) # no need to build, so skip to the end by `return`ing here # this will still execute the finally clause and let's us @@ -763,8 +752,7 @@ class Repo2Docker(Application): checkout_path = os.path.join(checkout_path, self.subdir) if not os.path.isdir(checkout_path): self.log.error( - "Subdirectory %s does not exist", - self.subdir, + f"Subdirectory {self.subdir} does not exist", extra=dict(phase=R2dState.FAILED), ) raise FileNotFoundError(f"Could not find {checkout_path}") @@ -808,8 +796,7 @@ class Repo2Docker(Application): ) self.log.info( - "Using %s builder\n", - bp.__class__.__name__, + f"Using {bp.__class__.__name__} builder\n", extra=dict(phase=R2dState.BUILDING), ) diff --git a/repo2docker/buildpacks/_r_base.py b/repo2docker/buildpacks/_r_base.py index c7327379..35a3177b 100644 --- a/repo2docker/buildpacks/_r_base.py +++ b/repo2docker/buildpacks/_r_base.py @@ -26,7 +26,7 @@ def rstudio_base_scripts(r_version): # we should have --no-install-recommends on all our apt-get install commands, # but here it's important because these recommend r-base, # which will upgrade the installed version of R, undoing our pinned version - r""" + rf""" curl --silent --location --fail {rstudio_url} > /tmp/rstudio.deb && \ curl --silent --location --fail {shiny_server_url} > /tmp/shiny.deb && \ echo '{rstudio_sha256sum} /tmp/rstudio.deb' | sha256sum -c - && \ @@ -37,24 +37,16 @@ def rstudio_base_scripts(r_version): apt-get -qq purge && \ apt-get -qq clean && \ rm -rf /var/lib/apt/lists/* - """.format( - rstudio_url=rstudio_url, - rstudio_sha256sum=rstudio_sha256sum, - shiny_server_url=shiny_server_url, - shiny_sha256sum=shiny_sha256sum, - ), + """, ), ( "${NB_USER}", # Install jupyter-rsession-proxy - r""" + rf""" pip install --no-cache \ jupyter-rsession-proxy=={rsession_proxy_version} \ jupyter-shiny-proxy=={shiny_proxy_version} - """.format( - rsession_proxy_version=rsession_proxy_version, - shiny_proxy_version=shiny_proxy_version, - ), + """, ), ( # Not all of these locations 
are configurable; so we make sure diff --git a/repo2docker/buildpacks/base.py b/repo2docker/buildpacks/base.py index 16476e9d..0758afc6 100644 --- a/repo2docker/buildpacks/base.py +++ b/repo2docker/buildpacks/base.py @@ -594,8 +594,8 @@ class BuildPack: # buildpacks/docker.py where it is duplicated if not isinstance(memory_limit, int): raise ValueError( - "The memory limit has to be specified as an" - "integer but is '{}'".format(type(memory_limit)) + "The memory limit has to be specified as an " + f"integer but is '{type(memory_limit)}'" ) limits = {} if memory_limit: @@ -647,8 +647,7 @@ class BaseImage(BuildPack): # FIXME: Add support for specifying version numbers if not re.match(r"^[a-z0-9.+-]+", package): raise ValueError( - "Found invalid package name {} in " - "apt.txt".format(package) + f"Found invalid package name {package} in apt.txt" ) extra_apt_packages.append(package) diff --git a/repo2docker/buildpacks/conda/__init__.py b/repo2docker/buildpacks/conda/__init__.py index 662e70bf..8c4426dc 100644 --- a/repo2docker/buildpacks/conda/__init__.py +++ b/repo2docker/buildpacks/conda/__init__.py @@ -341,15 +341,13 @@ class CondaBuildPack(BaseImage): scripts.append( ( "${NB_USER}", - r""" + rf""" TIMEFORMAT='time: %3R' \ - bash -c 'time ${{MAMBA_EXE}} env update -p {0} --file "{1}" && \ + bash -c 'time ${{MAMBA_EXE}} env update -p {env_prefix} --file "{environment_yml}" && \ time ${{MAMBA_EXE}} clean --all -f -y && \ - ${{MAMBA_EXE}} list -p {0} \ + ${{MAMBA_EXE}} list -p {env_prefix} \ ' - """.format( - env_prefix, environment_yml - ), + """, ) ) @@ -361,36 +359,30 @@ class CondaBuildPack(BaseImage): scripts.append( ( "${NB_USER}", - r""" - ${{MAMBA_EXE}} install -p {0} r-base{1} r-irkernel r-devtools -y && \ + rf""" + ${{MAMBA_EXE}} install -p {env_prefix} r-base{r_pin} r-irkernel r-devtools -y && \ ${{MAMBA_EXE}} clean --all -f -y && \ - ${{MAMBA_EXE}} list -p {0} - """.format( - env_prefix, r_pin - ), + ${{MAMBA_EXE}} list -p {env_prefix} + """, ) ) scripts 
+= rstudio_base_scripts(self.r_version) scripts += [ ( "root", - r""" + rf""" echo auth-none=1 >> /etc/rstudio/rserver.conf && \ echo auth-minimum-user-id=0 >> /etc/rstudio/rserver.conf && \ - echo "rsession-which-r={}/bin/R" >> /etc/rstudio/rserver.conf && \ + echo "rsession-which-r={env_prefix}/bin/R" >> /etc/rstudio/rserver.conf && \ echo www-frame-origin=same >> /etc/rstudio/rserver.conf - """.format( - env_prefix - ), + """, ), ( "${NB_USER}", # Register the jupyter kernel - r""" - R --quiet -e "IRkernel::installspec(prefix='{}')" - """.format( - env_prefix - ), + rf""" + R --quiet -e "IRkernel::installspec(prefix='{env_prefix}')" + """, ), ] return scripts diff --git a/repo2docker/buildpacks/docker.py b/repo2docker/buildpacks/docker.py index e0ccfc95..6d8bd469 100644 --- a/repo2docker/buildpacks/docker.py +++ b/repo2docker/buildpacks/docker.py @@ -34,8 +34,8 @@ class DockerBuildPack(BuildPack): # buildpacks/base.py where it is duplicated if not isinstance(memory_limit, int): raise ValueError( - "The memory limit has to be specified as an" - "integer but is '{}'".format(type(memory_limit)) + "The memory limit has to be specified as an " + f"integer but is '{type(memory_limit)}'" ) limits = {} if memory_limit: diff --git a/repo2docker/buildpacks/nix/__init__.py b/repo2docker/buildpacks/nix/__init__.py index b7ca105d..4947b92e 100644 --- a/repo2docker/buildpacks/nix/__init__.py +++ b/repo2docker/buildpacks/nix/__init__.py @@ -62,13 +62,11 @@ class NixBuildPack(BaseImage): return super().get_assemble_scripts() + [ ( "${NB_USER}", - """ + f""" nix-channel --add https://nixos.org/channels/nixpkgs-unstable nixpkgs && \ nix-channel --update && \ - nix-shell {} - """.format( - self.binder_path("default.nix") - ), + nix-shell {self.binder_path("default.nix")} + """, ) ] diff --git a/repo2docker/buildpacks/pipfile/__init__.py b/repo2docker/buildpacks/pipfile/__init__.py index e928d01e..f08a1beb 100644 --- a/repo2docker/buildpacks/pipfile/__init__.py +++ 
b/repo2docker/buildpacks/pipfile/__init__.py @@ -123,9 +123,7 @@ class PipfileBuildPack(CondaBuildPack): assemble_scripts.append( ( "${NB_USER}", - '${{NB_PYTHON_PREFIX}}/bin/pip install --no-cache-dir -r "{}"'.format( - nb_requirements_file - ), + f'${{NB_PYTHON_PREFIX}}/bin/pip install --no-cache-dir -r "{nb_requirements_file}"', ) ) diff --git a/repo2docker/buildpacks/python/__init__.py b/repo2docker/buildpacks/python/__init__.py index 0c12f91b..bdcaa520 100644 --- a/repo2docker/buildpacks/python/__init__.py +++ b/repo2docker/buildpacks/python/__init__.py @@ -55,9 +55,7 @@ class PythonBuildPack(CondaBuildPack): "${NB_USER}", # want the $NB_PYHTON_PREFIX environment variable, not for # Python's string formatting to try and replace this - '${{NB_PYTHON_PREFIX}}/bin/pip install --no-cache-dir -r "{}"'.format( - nb_requirements_file - ), + f'${{NB_PYTHON_PREFIX}}/bin/pip install --no-cache-dir -r "{nb_requirements_file}"', ) ) diff --git a/repo2docker/buildpacks/r.py b/repo2docker/buildpacks/r.py index 9009a4d8..3156913f 100644 --- a/repo2docker/buildpacks/r.py +++ b/repo2docker/buildpacks/r.py @@ -336,12 +336,10 @@ class RBuildPack(PythonBuildPack): ( "${NB_USER}", # Install a pinned version of devtools, IRKernel and shiny - r""" - R --quiet -e "install.packages(c('devtools', 'IRkernel', 'shiny'), repos='{devtools_cran_mirror_url}')" && \ + rf""" + R --quiet -e "install.packages(c('devtools', 'IRkernel', 'shiny'), repos='{self.get_devtools_snapshot_url()}')" && \ R --quiet -e "IRkernel::installspec(prefix='$NB_PYTHON_PREFIX')" - """.format( - devtools_cran_mirror_url=self.get_devtools_snapshot_url() - ), + """, ), ] @@ -374,8 +372,7 @@ class RBuildPack(PythonBuildPack): "${NB_USER}", # Delete /tmp/downloaded_packages only if install.R fails, as the second # invocation of install.R might be able to reuse them - "Rscript %s && touch /tmp/.preassembled || true && rm -rf /tmp/downloaded_packages" - % installR_path, + f"Rscript {installR_path} && touch 
/tmp/.preassembled || true && rm -rf /tmp/downloaded_packages", ) ] @@ -392,9 +389,7 @@ class RBuildPack(PythonBuildPack): "${NB_USER}", # only run install.R if the pre-assembly failed # Delete any downloaded packages in /tmp, as they aren't reused by R - """if [ ! -f /tmp/.preassembled ]; then Rscript {}; rm -rf /tmp/downloaded_packages; fi""".format( - installR_path - ), + f"""if [ ! -f /tmp/.preassembled ]; then Rscript {installR_path}; rm -rf /tmp/downloaded_packages; fi""", ) ] diff --git a/repo2docker/contentproviders/base.py b/repo2docker/contentproviders/base.py index f4f1cbbb..672c3973 100644 --- a/repo2docker/contentproviders/base.py +++ b/repo2docker/contentproviders/base.py @@ -68,6 +68,6 @@ class Local(ContentProvider): def fetch(self, spec, output_dir, yield_output=False): # nothing to be done if your content is already in the output directory - msg = "Local content provider assumes {} == {}".format(spec["path"], output_dir) + msg = f'Local content provider assumes {spec["path"]} == {output_dir}' assert output_dir == spec["path"], msg - yield "Using local repo {}.\n".format(spec["path"]) + yield f'Using local repo {spec["path"]}.\n' diff --git a/repo2docker/contentproviders/dataverse.py b/repo2docker/contentproviders/dataverse.py index c308c2d0..e6e7da38 100644 --- a/repo2docker/contentproviders/dataverse.py +++ b/repo2docker/contentproviders/dataverse.py @@ -76,9 +76,7 @@ class Dataverse(DoiProvider): data = self.urlopen(search_url).json()["data"] if data["count_in_response"] != 1: self.log.debug( - "Dataverse search query failed!\n - doi: {}\n - url: {}\n - resp: {}\n".format( - doi, url, json.dump(data) - ) + f"Dataverse search query failed!\n - doi: {doi}\n - url: {url}\n - resp: {json.dumps(data)}\n" ) return @@ -98,16 +96,14 @@ class Dataverse(DoiProvider): host = spec["host"] yield f"Fetching Dataverse record {record_id}.\n" - url = "{}/api/datasets/:persistentId?persistentId={}".format( - host["url"], record_id - ) + url = 
f'{host["url"]}/api/datasets/:persistentId?persistentId={record_id}' resp = self.urlopen(url, headers={"accept": "application/json"}) record = resp.json()["data"] for fobj in deep_get(record, "latestVersion.files"): - file_url = "{}/api/access/datafile/{}".format( - host["url"], deep_get(fobj, "dataFile.id") + file_url = ( + f'{host["url"]}/api/access/datafile/{deep_get(fobj, "dataFile.id")}' ) filename = os.path.join(fobj.get("directoryLabel", ""), fobj["label"]) diff --git a/repo2docker/contentproviders/figshare.py b/repo2docker/contentproviders/figshare.py index 5d27684d..0735e441 100644 --- a/repo2docker/contentproviders/figshare.py +++ b/repo2docker/contentproviders/figshare.py @@ -75,11 +75,9 @@ class Figshare(DoiProvider): article_version = spec["version"] host = spec["host"] - yield "Fetching Figshare article {} in version {}.\n".format( - article_id, article_version - ) + yield f"Fetching Figshare article {article_id} in version {article_version}.\n" resp = self.urlopen( - "{}{}/versions/{}".format(host["api"], article_id, article_version), + f'{host["api"]}{article_id}/versions/{article_version}', headers={"accept": "application/json"}, ) diff --git a/repo2docker/contentproviders/git.py b/repo2docker/contentproviders/git.py index 9e5fb48e..a3d330e6 100644 --- a/repo2docker/contentproviders/git.py +++ b/repo2docker/contentproviders/git.py @@ -43,7 +43,7 @@ class Git(ContentProvider): hash = check_ref(ref, output_dir) if hash is None: self.log.error( - "Failed to check out ref %s", ref, extra=dict(phase=R2dState.FAILED) + f"Failed to check out ref {ref}", extra=dict(phase=R2dState.FAILED) ) if ref == "master": msg = ( diff --git a/repo2docker/contentproviders/hydroshare.py b/repo2docker/contentproviders/hydroshare.py index 3378129a..886a8f81 100755 --- a/repo2docker/contentproviders/hydroshare.py +++ b/repo2docker/contentproviders/hydroshare.py @@ -59,7 +59,7 @@ class Hydroshare(DoiProvider): resource_id = spec["resource"] host = spec["host"] - bag_url = 
"{}{}".format(host["django_irods"], resource_id) + bag_url = f'{host["django_irods"]}{resource_id}' yield f"Downloading {bag_url}.\n" @@ -76,9 +76,7 @@ class Hydroshare(DoiProvider): msg = "Bag taking too long to prepare, exiting now, try again later." yield msg raise ContentProviderException(msg) - yield "Bag is being prepared, requesting again in {} seconds.\n".format( - wait_time - ) + yield f"Bag is being prepared, requesting again in {wait_time} seconds.\n" time.sleep(wait_time) conn = self.urlopen(bag_url) if conn.status_code != 200: diff --git a/repo2docker/contentproviders/mercurial.py b/repo2docker/contentproviders/mercurial.py index e29584f2..821f55f3 100644 --- a/repo2docker/contentproviders/mercurial.py +++ b/repo2docker/contentproviders/mercurial.py @@ -60,7 +60,7 @@ class Mercurial(ContentProvider): ) except subprocess.CalledProcessError: self.log.error( - "Failed to update to ref %s", ref, extra=dict(phase=R2dState.FAILED) + f"Failed to update to ref {ref}", extra=dict(phase=R2dState.FAILED) ) raise ValueError(f"Failed to update to ref {ref}") diff --git a/repo2docker/contentproviders/zenodo.py b/repo2docker/contentproviders/zenodo.py index a58e295f..0a8d9108 100644 --- a/repo2docker/contentproviders/zenodo.py +++ b/repo2docker/contentproviders/zenodo.py @@ -68,7 +68,7 @@ class Zenodo(DoiProvider): yield f"Fetching Zenodo record {record_id}.\n" resp = self.urlopen( - "{}{}".format(host["api"], record_id), + f'{host["api"]}{record_id}', headers={"accept": "application/json"}, ) diff --git a/repo2docker/utils.py b/repo2docker/utils.py index e8833bd8..852f21e2 100644 --- a/repo2docker/utils.py +++ b/repo2docker/utils.py @@ -136,13 +136,10 @@ def validate_and_generate_port_mapping(port_mappings): try: p = int(port) except ValueError as e: - raise ValueError( - 'Port specification "{}" has ' "an invalid port.".format(mapping) - ) + raise ValueError(f'Port specification "{mapping}" has an invalid port.') if not 0 < p <= 65535: raise ValueError( - 'Port 
specification "{}" specifies ' - "a port outside 1-65535.".format(mapping) + f'Port specification "{mapping}" specifies a port outside 1-65535.' ) return port @@ -152,8 +149,7 @@ def validate_and_generate_port_mapping(port_mappings): port, protocol = parts if protocol not in ("tcp", "udp"): raise ValueError( - 'Port specification "{}" has ' - "an invalid protocol.".format(mapping) + f'Port specification "{mapping}" has an invalid protocol.' ) elif len(parts) == 1: port = parts[0] @@ -310,14 +306,14 @@ class ByteSpecification(Integer): num = float(value[:-1]) except ValueError: raise TraitError( - "{val} is not a valid memory specification. " - "Must be an int or a string with suffix K, M, G, T".format(val=value) + f"{value} is not a valid memory specification. " + "Must be an int or a string with suffix K, M, G, T" ) suffix = value[-1] if suffix not in self.UNIT_SUFFIXES: raise TraitError( - "{val} is not a valid memory specification. " - "Must be an int or a string with suffix K, M, G, T".format(val=value) + f"{value} is not a valid memory specification. 
" + "Must be an int or a string with suffix K, M, G, T" ) else: return int(float(num) * self.UNIT_SUFFIXES[suffix]) diff --git a/tests/conda/py2/verify b/tests/conda/py2/verify index b251755a..4a55686c 100755 --- a/tests/conda/py2/verify +++ b/tests/conda/py2/verify @@ -30,4 +30,4 @@ for pkg in pkgs: assert pkg["version"].startswith("2.7.") break else: - assert False, "python not found in %s" % pkg_names + assert False, f"python not found in {pkg_names}" diff --git a/tests/conftest.py b/tests/conftest.py index eda58223..e2914801 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -65,7 +65,7 @@ def make_test_func(args, skip_build=False, extra_run_kwargs=None): container = app.start_container() port = app.port # wait a bit for the container to be ready - container_url = "http://localhost:%s/api" % port + container_url = f"http://localhost:{port}/api" # give the container a chance to start time.sleep(1) try: @@ -77,13 +77,13 @@ def make_test_func(args, skip_build=False, extra_run_kwargs=None): try: info = requests.get(container_url).json() except Exception as e: - print("Error: %s" % e) + print(f"Error: {e}") time.sleep(i * 3) else: print(info) success = True break - assert success, "Notebook never started in %s" % container + assert success, f"Notebook never started in {container}" finally: # stop the container container.stop() @@ -211,7 +211,7 @@ class Repo2DockerTest(pytest.Function): def repr_failure(self, excinfo): err = excinfo.value if isinstance(err, SystemExit): - cmd = "jupyter-repo2docker %s" % " ".join(map(pipes.quote, self.args)) + cmd = f'jupyter-repo2docker {" ".join(map(pipes.quote, self.args))}' return f"{cmd} | exited with status={err.code}" else: return super().repr_failure(excinfo) diff --git a/tests/unit/test_connect_url.py b/tests/unit/test_connect_url.py index 3115a8ab..7438b8aa 100644 --- a/tests/unit/test_connect_url.py +++ b/tests/unit/test_connect_url.py @@ -59,13 +59,13 @@ def test_connect_url(tmpdir): try: info = 
requests.get(container_url).json() except Exception as e: - print("Error: %s" % e) + print(f"Error: {e}") time.sleep(i * 3) else: print(info) success = True break - assert success, "Notebook never started in %s" % container + assert success, f"Notebook never started in {container}" finally: # stop the container container.stop() diff --git a/tests/unit/test_subdir.py b/tests/unit/test_subdir.py index 43d08fce..4b0cd47b 100644 --- a/tests/unit/test_subdir.py +++ b/tests/unit/test_subdir.py @@ -21,7 +21,7 @@ def test_subdir(run_repo2docker): run_repo2docker(argv) # check that we restored the current working directory - assert cwd == os.getcwd(), "We should be back in %s" % cwd + assert cwd == os.getcwd(), f"We should be back in {cwd}" def test_subdir_in_image_name(): diff --git a/tests/unit/test_users.py b/tests/unit/test_users.py index e14eeaf1..642e4f3b 100644 --- a/tests/unit/test_users.py +++ b/tests/unit/test_users.py @@ -34,19 +34,14 @@ def test_user(): subprocess.check_call( [ "repo2docker", - "-v", - f"{tmpdir}:/home/{username}", - "--user-id", - userid, - "--user-name", - username, + f"--volume={tmpdir}:/home/{username}", + f"--user-id={userid}", + f"--user-name={username}", tmpdir, "--", "/bin/bash", "-c", - "id -u > id && pwd > pwd && whoami > name && echo -n $USER > env_user".format( - ts - ), + "id -u > id && pwd > pwd && whoami > name && echo -n $USER > env_user", ] ) diff --git a/tests/venv/usr-bin/verify b/tests/venv/usr-bin/verify index 4370b68e..c6efe782 100755 --- a/tests/venv/usr-bin/verify +++ b/tests/venv/usr-bin/verify @@ -4,4 +4,4 @@ import os assert os.path.expanduser("~/.local/bin") in os.getenv("PATH"), os.getenv("PATH") assert os.getcwd() == os.environ["REPO_DIR"] -assert "{}/.local/bin".format(os.environ["REPO_DIR"]) in os.getenv("PATH") +assert f'{os.environ["REPO_DIR"]}/.local/bin' in os.getenv("PATH") diff --git a/versioneer.py b/versioneer.py index a4708899..03a56057 100644 --- a/versioneer.py +++ b/versioneer.py @@ -1106,7 +1106,7 @@ 
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? - pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out + pieces["error"] = f"unable to parse git-describe output: '{describe_out}'" return pieces # tag @@ -1115,10 +1115,9 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) - pieces["error"] = "tag '{}' doesn't start with prefix '{}'".format( - full_tag, - tag_prefix, - ) + pieces[ + "error" + ] = f"tag '{full_tag}' doesn't start with prefix '{tag_prefix}'" return pieces pieces["closest-tag"] = full_tag[len(tag_prefix) :] @@ -1256,7 +1255,7 @@ def write_to_version_file(filename, versions): with open(filename, "w") as f: f.write(SHORT_VERSION_PY % contents) - print("set {} to '{}'".format(filename, versions["version"])) + print(f"set {filename} to '{versions['version']}'") def plus_or_dot(pieces):