refactor: manually add transitions to f-strings

pull/1202/head
Erik Sundell 2022-10-23 19:18:37 +02:00
parent 5a93542321
commit 15a1fb6e2d
27 changed files with 90 additions and 152 deletions
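The refactor replaces both %-interpolation and str.format() calls with f-strings. A minimal sketch of the three patterns converted throughout this diff (values here are illustrative, not from the codebase):

    name, port = "repo2docker", 8888

    # %-interpolation -> f-string
    assert "port %s" % port == f"port {port}"

    # positional str.format() -> f-string
    assert "image {} built".format(name) == f"image {name} built"

    # keyword str.format() -> f-string
    assert "hello {who}".format(who=name) == f"hello {name}"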

View file

@@ -61,12 +61,10 @@ from repo2docker.buildpacks.conda import CondaBuildPack
 default_python = CondaBuildPack.major_pythons["3"]
-rst_prolog = """
+rst_prolog = f"""
 .. |default_python| replace:: **Python {default_python}**
 .. |default_python_version| replace:: {default_python}
-""".format(
-    default_python=default_python
-)
+"""

 # -- Options for HTML output -------------------------------------------------

View file

@@ -304,8 +304,8 @@ def make_r2d(argv=None):
             r2d.volumes[os.path.abspath(args.repo)] = "."
         else:
             r2d.log.error(
-                'Cannot mount "{}" in editable mode '
-                "as it is not a directory".format(args.repo),
+                f'Cannot mount "{args.repo}" in editable mode '
+                "as it is not a directory",
                 extra=dict(phase=R2dState.FAILED),
             )
             sys.exit(1)

View file

@@ -293,7 +293,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
     mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe)
     if not mo:
         # unparseable. Maybe git-describe is misbehaving?
-        pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out
+        pieces["error"] = f"unable to parse git-describe output: '{describe_out}'"
         return pieces

     # tag
@@ -302,10 +302,9 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
         if verbose:
             fmt = "tag '%s' doesn't start with prefix '%s'"
             print(fmt % (full_tag, tag_prefix))
-        pieces["error"] = "tag '{}' doesn't start with prefix '{}'".format(
-            full_tag,
-            tag_prefix,
-        )
+        pieces[
+            "error"
+        ] = f"tag '{full_tag}' doesn't start with prefix '{tag_prefix}'"
         return pieces
     pieces["closest-tag"] = full_tag[len(tag_prefix) :]
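The odd-looking split of the assignment target in the new version is just black's line-length formatting; the subscript assignment itself is unchanged. Both spellings do the same thing (hypothetical values shown):

    pieces = {}
    full_tag, tag_prefix = "v1.0", "release-"

    pieces[
        "error"
    ] = f"tag '{full_tag}' doesn't start with prefix '{tag_prefix}'"
    # ...is exactly equivalent to the unwrapped form:
    assert pieces["error"] == f"tag '{full_tag}' doesn't start with prefix '{tag_prefix}'"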

View file

@@ -425,9 +425,7 @@ class Repo2Docker(Application):
             entry = engines[self.engine]
         except KeyError:
             raise ContainerEngineException(
-                "Container engine '{}' not found. Available engines: {}".format(
-                    self.engine, ",".join(engines.keys())
-                )
+                f"Container engine '{self.engine}' not found. Available engines: {','.join(engines.keys())}"
             )
         engine_class = entry.load()
         return engine_class(parent=self)
@@ -447,16 +445,11 @@ class Repo2Docker(Application):
             spec = cp.detect(url, ref=ref)
             if spec is not None:
                 picked_content_provider = cp
-                self.log.info(
-                    "Picked {cp} content "
-                    "provider.\n".format(cp=cp.__class__.__name__)
-                )
+                self.log.info(f"Picked {cp.__class__.__name__} content provider.\n")
                 break

         if picked_content_provider is None:
-            self.log.error(
-                "No matching content provider found for " "{url}.".format(url=url)
-            )
+            self.log.error(f"No matching content provider found for {url}.")

         swh_token = self.config.get("swh_token", self.swh_token)
         if swh_token and isinstance(picked_content_provider, contentproviders.Swhid):
@@ -488,8 +481,7 @@ class Repo2Docker(Application):
         Avoids non-JSON output on errors when using --json-logs
         """
         self.log.error(
-            "Error during build: %s",
-            evalue,
+            f"Error during build: {evalue}",
             exc_info=(etype, evalue, traceback),
             extra=dict(phase=R2dState.FAILED),
         )
@@ -619,11 +611,9 @@ class Repo2Docker(Application):
             run_cmd = [
                 "jupyter",
                 "notebook",
-                "--ip",
-                "0.0.0.0",
-                "--port",
-                container_port,
-                f"--NotebookApp.custom_display_url=http://{host_name}:{host_port}"
+                "--ip=0.0.0.0",
+                f"--port={container_port}",
+                f"--NotebookApp.custom_display_url=http://{host_name}:{host_port}",
                 "--NotebookApp.default_url=/lab",
             ]
         else:
@@ -730,7 +720,7 @@ class Repo2Docker(Application):
         try:
             docker_client = self.get_engine()
         except ContainerEngineException as e:
-            self.log.error("\nContainer engine initialization error: %s\n", e)
+            self.log.error(f"\nContainer engine initialization error: {e}\n")
             self.exit(1)

         # If the source to be executed is a directory, continue using the
@@ -751,8 +741,7 @@ class Repo2Docker(Application):
             if self.find_image():
                 self.log.info(
-                    "Reusing existing image ({}), not "
-                    "building.".format(self.output_image_spec)
+                    f"Reusing existing image ({self.output_image_spec}), not building."
                 )
                 # no need to build, so skip to the end by `return`ing here
                 # this will still execute the finally clause and let's us
@@ -763,8 +752,7 @@ class Repo2Docker(Application):
             checkout_path = os.path.join(checkout_path, self.subdir)
             if not os.path.isdir(checkout_path):
                 self.log.error(
-                    "Subdirectory %s does not exist",
-                    self.subdir,
+                    f"Subdirectory {self.subdir} does not exist",
                     extra=dict(phase=R2dState.FAILED),
                 )
                 raise FileNotFoundError(f"Could not find {checkout_path}")
@@ -808,8 +796,7 @@ class Repo2Docker(Application):
             )

             self.log.info(
-                "Using %s builder\n",
-                bp.__class__.__name__,
+                f"Using {bp.__class__.__name__} builder\n",
                 extra=dict(phase=R2dState.BUILDING),
             )
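Beyond the f-string conversion, the run_cmd hunk fixes a real bug: the old custom_display_url element had no trailing comma, so Python's implicit literal concatenation fused it with the next list element. A sketch of the pitfall with illustrative values (the same mechanism is why the "as an" "integer" message in base.py and docker.py below previously rendered as "as aninteger"):

    host, port = "localhost", 8888

    args = [
        f"--NotebookApp.custom_display_url=http://{host}:{port}"  # no comma: bug
        "--NotebookApp.default_url=/lab",
    ]
    # Adjacent string literals are concatenated at compile time, so the list
    # holds one fused (and broken) CLI argument instead of two:
    assert len(args) == 1
    assert args[0].endswith("8888--NotebookApp.default_url=/lab")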

View file

@@ -26,7 +26,7 @@ def rstudio_base_scripts(r_version):
             # we should have --no-install-recommends on all our apt-get install commands,
             # but here it's important because these recommend r-base,
             # which will upgrade the installed version of R, undoing our pinned version
-            r"""
+            rf"""
             curl --silent --location --fail {rstudio_url} > /tmp/rstudio.deb && \
             curl --silent --location --fail {shiny_server_url} > /tmp/shiny.deb && \
             echo '{rstudio_sha256sum} /tmp/rstudio.deb' | sha256sum -c - && \
@@ -37,24 +37,16 @@ def rstudio_base_scripts(r_version):
             apt-get -qq purge && \
             apt-get -qq clean && \
             rm -rf /var/lib/apt/lists/*
-            """.format(
-                rstudio_url=rstudio_url,
-                rstudio_sha256sum=rstudio_sha256sum,
-                shiny_server_url=shiny_server_url,
-                shiny_sha256sum=shiny_sha256sum,
-            ),
+            """,
         ),
         (
             "${NB_USER}",
             # Install jupyter-rsession-proxy
-            r"""
+            rf"""
                 pip install --no-cache \
                     jupyter-rsession-proxy=={rsession_proxy_version} \
                     jupyter-shiny-proxy=={shiny_proxy_version}
-            """.format(
-                rsession_proxy_version=rsession_proxy_version,
-                shiny_proxy_version=shiny_proxy_version,
-            ),
+            """,
         ),
         (
             # Not all of these locations are configurable; so we make sure

View file

@@ -594,8 +594,8 @@ class BuildPack:
         # buildpacks/docker.py where it is duplicated
         if not isinstance(memory_limit, int):
             raise ValueError(
-                "The memory limit has to be specified as an"
-                "integer but is '{}'".format(type(memory_limit))
+                "The memory limit has to be specified as an "
+                f"integer but is '{type(memory_limit)}'"
             )
         limits = {}
         if memory_limit:
@@ -647,8 +647,7 @@ class BaseImage(BuildPack):
                     # FIXME: Add support for specifying version numbers
                     if not re.match(r"^[a-z0-9.+-]+", package):
                         raise ValueError(
-                            "Found invalid package name {} in "
-                            "apt.txt".format(package)
+                            f"Found invalid package name {package} in apt.txt"
                         )
                     extra_apt_packages.append(package)

View file

@@ -341,15 +341,13 @@ class CondaBuildPack(BaseImage):
             scripts.append(
                 (
                     "${NB_USER}",
-                    r"""
+                    rf"""
                     TIMEFORMAT='time: %3R' \
-                    bash -c 'time ${{MAMBA_EXE}} env update -p {0} --file "{1}" && \
+                    bash -c 'time ${{MAMBA_EXE}} env update -p {env_prefix} --file "{environment_yml}" && \
                     time ${{MAMBA_EXE}} clean --all -f -y && \
-                    ${{MAMBA_EXE}} list -p {0} \
+                    ${{MAMBA_EXE}} list -p {env_prefix} \
                     '
-                    """.format(
-                        env_prefix, environment_yml
-                    ),
+                    """,
                 )
             )
@@ -361,36 +359,30 @@ class CondaBuildPack(BaseImage):
             scripts.append(
                 (
                     "${NB_USER}",
-                    r"""
-                    ${{MAMBA_EXE}} install -p {0} r-base{1} r-irkernel r-devtools -y && \
+                    rf"""
+                    ${{MAMBA_EXE}} install -p {env_prefix} r-base{r_pin} r-irkernel r-devtools -y && \
                     ${{MAMBA_EXE}} clean --all -f -y && \
-                    ${{MAMBA_EXE}} list -p {0}
-                    """.format(
-                        env_prefix, r_pin
-                    ),
+                    ${{MAMBA_EXE}} list -p {env_prefix}
+                    """,
                 )
             )
             scripts += rstudio_base_scripts(self.r_version)
             scripts += [
                 (
                     "root",
-                    r"""
+                    rf"""
                     echo auth-none=1 >> /etc/rstudio/rserver.conf && \
                     echo auth-minimum-user-id=0 >> /etc/rstudio/rserver.conf && \
-                    echo "rsession-which-r={}/bin/R" >> /etc/rstudio/rserver.conf && \
+                    echo "rsession-which-r={env_prefix}/bin/R" >> /etc/rstudio/rserver.conf && \
                     echo www-frame-origin=same >> /etc/rstudio/rserver.conf
-                    """.format(
-                        env_prefix
-                    ),
+                    """,
                 ),
                 (
                     "${NB_USER}",
                     # Register the jupyter kernel
-                    r"""
-                    R --quiet -e "IRkernel::installspec(prefix='{}')"
-                    """.format(
-                        env_prefix
-                    ),
+                    rf"""
+                    R --quiet -e "IRkernel::installspec(prefix='{env_prefix}')"
+                    """,
                 ),
             ]
         return scripts
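These script bodies become rf-strings: the r prefix keeps the backslash line continuations literal for the generated Dockerfile, while the f prefix interpolates env_prefix and friends. Doubled braces still escape to literal braces in f-strings, so shell expansions like ${MAMBA_EXE} keep the same ${{...}} spelling they needed under .format(). A sketch with an illustrative value:

    env_prefix = "/srv/conda/envs/notebook"  # illustrative value

    script = rf"""
    ${{MAMBA_EXE}} install -p {env_prefix} -y && \
    ${{MAMBA_EXE}} clean --all -f -y
    """
    # {{ }} collapse to literal braces, {env_prefix} is interpolated,
    # and the r prefix preserves the trailing backslash-newline:
    assert "${MAMBA_EXE}" in script
    assert env_prefix in script
    assert "\\\n" in script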

View file

@@ -34,8 +34,8 @@ class DockerBuildPack(BuildPack):
         # buildpacks/base.py where it is duplicated
         if not isinstance(memory_limit, int):
             raise ValueError(
-                "The memory limit has to be specified as an"
-                "integer but is '{}'".format(type(memory_limit))
+                "The memory limit has to be specified as an "
+                f"integer but is '{type(memory_limit)}'"
             )
         limits = {}
         if memory_limit:

View file

@@ -62,13 +62,11 @@ class NixBuildPack(BaseImage):
         return super().get_assemble_scripts() + [
             (
                 "${NB_USER}",
-                """
+                f"""
                 nix-channel --add https://nixos.org/channels/nixpkgs-unstable nixpkgs && \
                 nix-channel --update && \
-                nix-shell {}
-                """.format(
-                    self.binder_path("default.nix")
-                ),
+                nix-shell {self.binder_path("default.nix")}
+                """,
             )
         ]

View file

@@ -123,9 +123,7 @@ class PipfileBuildPack(CondaBuildPack):
             assemble_scripts.append(
                 (
                     "${NB_USER}",
-                    '${{NB_PYTHON_PREFIX}}/bin/pip install --no-cache-dir -r "{}"'.format(
-                        nb_requirements_file
-                    ),
+                    f'${{NB_PYTHON_PREFIX}}/bin/pip install --no-cache-dir -r "{nb_requirements_file}"',
                 )
             )

View file

@@ -55,9 +55,7 @@ class PythonBuildPack(CondaBuildPack):
                     "${NB_USER}",
                     # want the $NB_PYHTON_PREFIX environment variable, not for
                     # Python's string formatting to try and replace this
-                    '${{NB_PYTHON_PREFIX}}/bin/pip install --no-cache-dir -r "{}"'.format(
-                        nb_requirements_file
-                    ),
+                    f'${{NB_PYTHON_PREFIX}}/bin/pip install --no-cache-dir -r "{nb_requirements_file}"',
                 )
             )

View file

@@ -336,12 +336,10 @@ class RBuildPack(PythonBuildPack):
             (
                 "${NB_USER}",
                 # Install a pinned version of devtools, IRKernel and shiny
-                r"""
-                R --quiet -e "install.packages(c('devtools', 'IRkernel', 'shiny'), repos='{devtools_cran_mirror_url}')" && \
+                rf"""
+                R --quiet -e "install.packages(c('devtools', 'IRkernel', 'shiny'), repos='{self.get_devtools_snapshot_url()}')" && \
                 R --quiet -e "IRkernel::installspec(prefix='$NB_PYTHON_PREFIX')"
-                """.format(
-                    devtools_cran_mirror_url=self.get_devtools_snapshot_url()
-                ),
+                """,
             ),
         ]
@@ -374,8 +372,7 @@ class RBuildPack(PythonBuildPack):
                 "${NB_USER}",
                 # Delete /tmp/downloaded_packages only if install.R fails, as the second
                 # invocation of install.R might be able to reuse them
-                "Rscript %s && touch /tmp/.preassembled || true && rm -rf /tmp/downloaded_packages"
-                % installR_path,
+                f"Rscript {installR_path} && touch /tmp/.preassembled || true && rm -rf /tmp/downloaded_packages",
             )
         ]
@@ -392,9 +389,7 @@ class RBuildPack(PythonBuildPack):
                 "${NB_USER}",
                 # only run install.R if the pre-assembly failed
                 # Delete any downloaded packages in /tmp, as they aren't reused by R
-                """if [ ! -f /tmp/.preassembled ]; then Rscript {}; rm -rf /tmp/downloaded_packages; fi""".format(
-                    installR_path
-                ),
+                f"""if [ ! -f /tmp/.preassembled ]; then Rscript {installR_path}; rm -rf /tmp/downloaded_packages; fi""",
             )
         ]

View file

@@ -68,6 +68,6 @@ class Local(ContentProvider):
     def fetch(self, spec, output_dir, yield_output=False):
         # nothing to be done if your content is already in the output directory
-        msg = "Local content provider assumes {} == {}".format(spec["path"], output_dir)
+        msg = f'Local content provider assumes {spec["path"]} == {output_dir}'
         assert output_dir == spec["path"], msg
-        yield "Using local repo {}.\n".format(spec["path"])
+        yield f'Using local repo {spec["path"]}.\n'
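Note the quote change on these lines: before Python 3.12, an f-string expression could not reuse the enclosing quote character, so expressions containing spec["path"] force single-quoted f-strings. A sketch of the constraint with a hypothetical spec:

    spec = {"path": "/tmp/repo"}  # hypothetical value

    # Inner double quotes require a different outer quote (pre-3.12 rule):
    msg = f'Using local repo {spec["path"]}.\n'
    assert msg == "Using local repo /tmp/repo.\n"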

View file

@@ -76,9 +76,7 @@ class Dataverse(DoiProvider):
         data = self.urlopen(search_url).json()["data"]
         if data["count_in_response"] != 1:
             self.log.debug(
-                "Dataverse search query failed!\n - doi: {}\n - url: {}\n - resp: {}\n".format(
-                    doi, url, json.dump(data)
-                )
+                f"Dataverse search query failed!\n - doi: {doi}\n - url: {url}\n - resp: {json.dump(data)}\n"
             )
             return
@@ -98,16 +96,14 @@ class Dataverse(DoiProvider):
         host = spec["host"]

         yield f"Fetching Dataverse record {record_id}.\n"
-        url = "{}/api/datasets/:persistentId?persistentId={}".format(
-            host["url"], record_id
-        )
+        url = f'{host["url"]}/api/datasets/:persistentId?persistentId={record_id}'

         resp = self.urlopen(url, headers={"accept": "application/json"})
         record = resp.json()["data"]

         for fobj in deep_get(record, "latestVersion.files"):
-            file_url = "{}/api/access/datafile/{}".format(
-                host["url"], deep_get(fobj, "dataFile.id")
-            )
+            file_url = (
+                f'{host["url"]}/api/access/datafile/{deep_get(fobj, "dataFile.id")}'
+            )
             filename = os.path.join(fobj.get("directoryLabel", ""), fobj["label"])
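One pre-existing wart survives the first hunk on both sides: json.dump serializes to a file object and requires one as its second argument, so evaluating json.dump(data) inside the message would raise a TypeError; json.dumps(data), which returns a string, is presumably what was meant. For reference:

    import json

    data = {"count_in_response": 2}
    # json.dumps returns a string; json.dump(data) alone would raise
    # TypeError because the required file-object argument is missing:
    assert json.dumps(data) == '{"count_in_response": 2}'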

View file

@@ -75,11 +75,9 @@ class Figshare(DoiProvider):
         article_version = spec["version"]
         host = spec["host"]

-        yield "Fetching Figshare article {} in version {}.\n".format(
-            article_id, article_version
-        )
+        yield f"Fetching Figshare article {article_id} in version {article_version}.\n"
         resp = self.urlopen(
-            "{}{}/versions/{}".format(host["api"], article_id, article_version),
+            f'{host["api"]}{article_id}/versions/{article_version}',
             headers={"accept": "application/json"},
         )

View file

@@ -43,7 +43,7 @@ class Git(ContentProvider):
             hash = check_ref(ref, output_dir)
             if hash is None:
                 self.log.error(
-                    "Failed to check out ref %s", ref, extra=dict(phase=R2dState.FAILED)
+                    f"Failed to check out ref {ref}", extra=dict(phase=R2dState.FAILED)
                 )
                 if ref == "master":
                     msg = (

View file

@@ -59,7 +59,7 @@ class Hydroshare(DoiProvider):
         resource_id = spec["resource"]
         host = spec["host"]

-        bag_url = "{}{}".format(host["django_irods"], resource_id)
+        bag_url = f'{host["django_irods"]}{resource_id}'

         yield f"Downloading {bag_url}.\n"
@@ -76,9 +76,7 @@ class Hydroshare(DoiProvider):
                     msg = "Bag taking too long to prepare, exiting now, try again later."
                     yield msg
                     raise ContentProviderException(msg)
-                yield "Bag is being prepared, requesting again in {} seconds.\n".format(
-                    wait_time
-                )
+                yield f"Bag is being prepared, requesting again in {wait_time} seconds.\n"
                 time.sleep(wait_time)
                 conn = self.urlopen(bag_url)
             if conn.status_code != 200:

View file

@@ -60,7 +60,7 @@ class Mercurial(ContentProvider):
             )
         except subprocess.CalledProcessError:
             self.log.error(
-                "Failed to update to ref %s", ref, extra=dict(phase=R2dState.FAILED)
+                f"Failed to update to ref {ref}", extra=dict(phase=R2dState.FAILED)
             )
             raise ValueError(f"Failed to update to ref {ref}")
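The Git and Mercurial hunks convert lazy %-style logging calls, where the logging module interpolates arguments only if the record is actually emitted, into f-strings, which are evaluated eagerly at call time. For cheap values like ref the difference is negligible; a sketch of the two styles (logger name is hypothetical):

    import logging

    log = logging.getLogger("example")  # hypothetical logger name
    ref = "main"

    # Lazy: interpolation deferred until the record is handled.
    log.error("Failed to update to ref %s", ref)

    # Eager: the f-string is built before log.error is even called.
    log.error(f"Failed to update to ref {ref}")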

View file

@@ -68,7 +68,7 @@ class Zenodo(DoiProvider):
         yield f"Fetching Zenodo record {record_id}.\n"
         resp = self.urlopen(
-            "{}{}".format(host["api"], record_id),
+            f'{host["api"]}{record_id}',
             headers={"accept": "application/json"},
         )

View file

@@ -136,13 +136,10 @@ def validate_and_generate_port_mapping(port_mappings):
         try:
             p = int(port)
         except ValueError as e:
-            raise ValueError(
-                'Port specification "{}" has ' "an invalid port.".format(mapping)
-            )
+            raise ValueError(f'Port specification "{mapping}" has an invalid port.')
         if not 0 < p <= 65535:
             raise ValueError(
-                'Port specification "{}" specifies '
-                "a port outside 1-65535.".format(mapping)
+                f'Port specification "{mapping}" specifies a port outside 1-65535.'
             )
         return port
@@ -152,8 +149,7 @@ def validate_and_generate_port_mapping(port_mappings):
         port, protocol = parts
         if protocol not in ("tcp", "udp"):
             raise ValueError(
-                'Port specification "{}" has ' "an invalid protocol.".format(mapping)
+                f'Port specification "{mapping}" has an invalid protocol.'
             )
     elif len(parts) == 1:
         port = parts[0]
@@ -310,14 +306,14 @@ class ByteSpecification(Integer):
             num = float(value[:-1])
         except ValueError:
             raise TraitError(
-                "{val} is not a valid memory specification. "
-                "Must be an int or a string with suffix K, M, G, T".format(val=value)
+                f"{value} is not a valid memory specification. "
+                "Must be an int or a string with suffix K, M, G, T"
             )
         suffix = value[-1]

         if suffix not in self.UNIT_SUFFIXES:
             raise TraitError(
-                "{val} is not a valid memory specification. "
-                "Must be an int or a string with suffix K, M, G, T".format(val=value)
+                f"{value} is not a valid memory specification. "
+                "Must be an int or a string with suffix K, M, G, T"
             )
         else:
             return int(float(num) * self.UNIT_SUFFIXES[suffix])

View file

@@ -30,4 +30,4 @@ for pkg in pkgs:
         assert pkg["version"].startswith("2.7.")
         break
 else:
-    assert False, "python not found in %s" % pkg_names
+    assert False, f"python not found in {pkg_names}"

View file

@@ -65,7 +65,7 @@ def make_test_func(args, skip_build=False, extra_run_kwargs=None):
         container = app.start_container()
         port = app.port
         # wait a bit for the container to be ready
-        container_url = "http://localhost:%s/api" % port
+        container_url = f"http://localhost:{port}/api"
         # give the container a chance to start
         time.sleep(1)
         try:
@@ -77,13 +77,13 @@ def make_test_func(args, skip_build=False, extra_run_kwargs=None):
                 try:
                     info = requests.get(container_url).json()
                 except Exception as e:
-                    print("Error: %s" % e)
+                    print(f"Error: {e}")
                     time.sleep(i * 3)
                 else:
                     print(info)
                     success = True
                     break
-            assert success, "Notebook never started in %s" % container
+            assert success, f"Notebook never started in {container}"
         finally:
             # stop the container
             container.stop()
@@ -211,7 +211,7 @@ class Repo2DockerTest(pytest.Function):
     def repr_failure(self, excinfo):
         err = excinfo.value
         if isinstance(err, SystemExit):
-            cmd = "jupyter-repo2docker %s" % " ".join(map(pipes.quote, self.args))
+            cmd = f'jupyter-repo2docker {" ".join(map(pipes.quote, self.args))}'
             return f"{cmd} | exited with status={err.code}"
         else:
             return super().repr_failure(excinfo)

View file

@@ -59,13 +59,13 @@ def test_connect_url(tmpdir):
             try:
                 info = requests.get(container_url).json()
             except Exception as e:
-                print("Error: %s" % e)
+                print(f"Error: {e}")
                 time.sleep(i * 3)
             else:
                 print(info)
                 success = True
                 break
-        assert success, "Notebook never started in %s" % container
+        assert success, f"Notebook never started in {container}"
     finally:
         # stop the container
         container.stop()

View file

@@ -21,7 +21,7 @@ def test_subdir(run_repo2docker):
     run_repo2docker(argv)

     # check that we restored the current working directory
-    assert cwd == os.getcwd(), "We should be back in %s" % cwd
+    assert cwd == os.getcwd(), f"We should be back in {cwd}"


 def test_subdir_in_image_name():

View file

@@ -34,19 +34,14 @@ def test_user():
     subprocess.check_call(
         [
             "repo2docker",
-            "-v",
-            f"{tmpdir}:/home/{username}",
-            "--user-id",
-            userid,
-            "--user-name",
-            username,
+            f"--volume={tmpdir}:/home/{username}",
+            f"--user-id={userid}",
+            f"--user-name={username}",
             tmpdir,
             "--",
             "/bin/bash",
             "-c",
-            "id -u > id && pwd > pwd && whoami > name && echo -n $USER > env_user".format(
-                ts
-            ),
+            "id -u > id && pwd > pwd && whoami > name && echo -n $USER > env_user",
         ]
     )
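The dropped .format(ts) here was a no-op: the string contains no replacement fields ($USER is shell syntax, not a format field), and str.format silently ignores unused positional arguments, so removing the call changes nothing. A sketch:

    cmd = "id -u > id && pwd > pwd && whoami > name && echo -n $USER > env_user"
    # With no {} replacement fields, str.format discards its arguments:
    assert cmd.format("anything") == cmd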

View file

@@ -4,4 +4,4 @@ import os

 assert os.path.expanduser("~/.local/bin") in os.getenv("PATH"), os.getenv("PATH")
 assert os.getcwd() == os.environ["REPO_DIR"]
-assert "{}/.local/bin".format(os.environ["REPO_DIR"]) in os.getenv("PATH")
+assert f'{os.environ["REPO_DIR"]}/.local/bin' in os.getenv("PATH")

View file

@@ -1106,7 +1106,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
     mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe)
     if not mo:
         # unparseable. Maybe git-describe is misbehaving?
-        pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out
+        pieces["error"] = f"unable to parse git-describe output: '{describe_out}'"
         return pieces

     # tag
@@ -1115,10 +1115,9 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
         if verbose:
             fmt = "tag '%s' doesn't start with prefix '%s'"
             print(fmt % (full_tag, tag_prefix))
-        pieces["error"] = "tag '{}' doesn't start with prefix '{}'".format(
-            full_tag,
-            tag_prefix,
-        )
+        pieces[
+            "error"
+        ] = f"tag '{full_tag}' doesn't start with prefix '{tag_prefix}'"
         return pieces
     pieces["closest-tag"] = full_tag[len(tag_prefix) :]
@@ -1256,7 +1255,7 @@ def write_to_version_file(filename, versions):
     with open(filename, "w") as f:
         f.write(SHORT_VERSION_PY % contents)

-    print("set {} to '{}'".format(filename, versions["version"]))
+    print(f"set {filename} to '{versions['version']}'")