[pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
pull/1202/head
pre-commit-ci[bot] 2022-10-23 16:09:11 +00:00 committed by Erik Sundell
parent ce44f98c9a
commit 5a93542321
36 changed files with 139 additions and 167 deletions
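The diffs below are mechanical modernizations applied by the configured hooks (pyupgrade-style rewrites plus black reformatting, judging by the patterns). As a hedged orientation sketch, using illustrative names rather than repo2docker's own code, the recurring rewrites are:

# Sketch of the recurring rewrites in this commit (illustrative names):
name = "world"

old = "hello {}".format(name)  # before: str.format()
new = f"hello {name}"          # after: f-string (PEP 498)
assert old == new

refs_old = set([r.strip() for r in "a, b".split(",")])  # before: set([...])
refs_new = {r.strip() for r in "a, b".split(",")}       # after: set comprehension
assert refs_old == refs_new

# Also seen throughout: open(path, "r") -> open(path), since "r" is the
# default mode, and "except EnvironmentError" -> "except OSError", which
# is an alias in Python 3.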

View file

@@ -52,7 +52,7 @@ class MimicDockerEnvHandling(argparse.Action):
# key pass using current value, or don't pass
if "=" not in values:
try:
value_to_append = "{}={}".format(values, os.environ[values])
value_to_append = f"{values}={os.environ[values]}"
except KeyError:
# no local def, so don't pass
return

View file

@@ -84,7 +84,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=
stderr=(subprocess.PIPE if hide_stderr else None),
)
break
except EnvironmentError:
except OSError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
@@ -94,7 +94,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=
return None, None
else:
if verbose:
print("unable to find command, tried %s" % (commands,))
print(f"unable to find command, tried {commands}")
return None, None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
@@ -147,7 +147,7 @@ def git_get_keywords(versionfile_abs):
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
f = open(versionfile_abs)
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
@@ -162,7 +162,7 @@ def git_get_keywords(versionfile_abs):
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError:
except OSError:
pass
return keywords
@@ -186,11 +186,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
refs = {r.strip() for r in refnames.strip("()").split(",")}
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)])
tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)}
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
@@ -199,7 +199,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r"\d", r)])
tags = {r for r in refs if re.search(r"\d", r)}
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
@@ -302,7 +302,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (
pieces["error"] = "tag '{}' doesn't start with prefix '{}'".format(
full_tag,
tag_prefix,
)

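A note on the EnvironmentError -> OSError rewrites in this file: since Python 3.3 (PEP 3151) EnvironmentError and IOError are plain aliases of OSError, so the new except clauses catch exactly what the old ones did. A minimal sketch:

# Both legacy names are aliases of OSError on Python 3.
assert EnvironmentError is OSError
assert IOError is OSError

try:
    open("/nonexistent/path/for/demo")
except OSError as e:
    # FileNotFoundError subclasses OSError, so this handler still fires.
    print(type(e).__name__, e.errno)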
View file

@@ -568,7 +568,7 @@ class Repo2Docker(Application):
)
last_emit_time = time.time()
self.log.info(
"Successfully pushed {}".format(self.output_image_spec),
f"Successfully pushed {self.output_image_spec}",
extra=dict(phase=R2dState.PUSHING),
)
@@ -767,7 +767,7 @@ class Repo2Docker(Application):
self.subdir,
extra=dict(phase=R2dState.FAILED),
)
raise FileNotFoundError("Could not find {}".format(checkout_path))
raise FileNotFoundError(f"Could not find {checkout_path}")
with chdir(checkout_path):
for BP in self.buildpacks:

View file

@@ -462,7 +462,7 @@ class BuildPack:
last_user = "root"
for user, script in self.get_build_scripts():
if last_user != user:
build_script_directives.append("USER {}".format(user))
build_script_directives.append(f"USER {user}")
last_user = user
build_script_directives.append(
"RUN {}".format(textwrap.dedent(script.strip("\n")))
@@ -472,7 +472,7 @@ class BuildPack:
last_user = "root"
for user, script in self.get_assemble_scripts():
if last_user != user:
assemble_script_directives.append("USER {}".format(user))
assemble_script_directives.append(f"USER {user}")
last_user = user
assemble_script_directives.append(
"RUN {}".format(textwrap.dedent(script.strip("\n")))
@@ -482,7 +482,7 @@ class BuildPack:
last_user = "root"
for user, script in self.get_preassemble_scripts():
if last_user != user:
preassemble_script_directives.append("USER {}".format(user))
preassemble_script_directives.append(f"USER {user}")
last_user = user
preassemble_script_directives.append(
"RUN {}".format(textwrap.dedent(script.strip("\n")))
@@ -616,8 +616,7 @@ class BuildPack:
build_kwargs.update(extra_build_kwargs)
for line in client.build(**build_kwargs):
yield line
yield from client.build(**build_kwargs)
class BaseImage(BuildPack):

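The loop-to-delegation rewrite above ("for line in client.build(...): yield line" becoming "yield from client.build(...)") is behavior-preserving for plain iteration; PEP 380's yield from additionally forwards send()/throw() and the subgenerator's return value. A minimal sketch with toy generators, not the real client.build():

def produce():
    yield "line 1"
    yield "line 2"

def relay_with_loop():
    for line in produce():  # the pattern being replaced
        yield line

def relay_with_delegation():
    yield from produce()  # equivalent here, and forwards send()/throw() too

assert list(relay_with_loop()) == list(relay_with_delegation())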
View file

@@ -377,7 +377,7 @@ class CondaBuildPack(BaseImage):
r"""
echo auth-none=1 >> /etc/rstudio/rserver.conf && \
echo auth-minimum-user-id=0 >> /etc/rstudio/rserver.conf && \
echo "rsession-which-r={0}/bin/R" >> /etc/rstudio/rserver.conf && \
echo "rsession-which-r={}/bin/R" >> /etc/rstudio/rserver.conf && \
echo www-frame-origin=same >> /etc/rstudio/rserver.conf
""".format(
env_prefix
@@ -387,7 +387,7 @@ class CondaBuildPack(BaseImage):
"${NB_USER}",
# Register the jupyter kernel
r"""
R --quiet -e "IRkernel::installspec(prefix='{0}')"
R --quiet -e "IRkernel::installspec(prefix='{}')"
""".format(
env_prefix
),

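The "{0}" -> "{}" changes here are cosmetic: with a single positional argument, the explicit index and the auto-numbered placeholder format identically. A minimal sketch (the prefix value is illustrative, not repo2docker's actual env_prefix):

env_prefix = "/srv/conda/envs/notebook"
assert "rsession-which-r={0}/bin/R".format(env_prefix) == (
    "rsession-which-r={}/bin/R".format(env_prefix)
)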
View file

@@ -57,5 +57,4 @@ class DockerBuildPack(BuildPack):
build_kwargs.update(extra_build_kwargs)
for line in client.build(**build_kwargs):
yield line
yield from client.build(**build_kwargs)

View file

@@ -20,7 +20,7 @@ class LegacyBinderDockerBuildPack:
"""Check if current repo should be built with the Legacy BuildPack."""
log = logging.getLogger("repo2docker")
try:
with open("Dockerfile", "r") as f:
with open("Dockerfile") as f:
for line in f:
if line.startswith("FROM"):
if "andrewosh/binder-base" in line.split("#")[0].lower():

View file

@@ -67,7 +67,7 @@ class PythonBuildPack(CondaBuildPack):
scripts.append(
(
"${NB_USER}",
'{} install --no-cache-dir -r "{}"'.format(pip, requirements_file),
f'{pip} install --no-cache-dir -r "{requirements_file}"',
)
)
return scripts
@@ -126,9 +126,7 @@ class PythonBuildPack(CondaBuildPack):
# setup.py exists *and* binder dir is not used
if not self.binder_dir and os.path.exists(setup_py):
assemble_scripts.append(
("${NB_USER}", "{} install --no-cache-dir .".format(pip))
)
assemble_scripts.append(("${NB_USER}", f"{pip} install --no-cache-dir ."))
return assemble_scripts
def detect(self):

View file

@@ -139,7 +139,7 @@ class RBuildPack(PythonBuildPack):
self._checkpoint_date = datetime.date.today() - datetime.timedelta(
days=2
)
self._runtime = "r-{}".format(str(self._checkpoint_date))
self._runtime = f"r-{str(self._checkpoint_date)}"
return True
def get_env(self):
@@ -223,7 +223,7 @@ class RBuildPack(PythonBuildPack):
for i in range(max_days_prior):
try_date = snapshot_date - datetime.timedelta(days=i)
# Fall back to MRAN if packagemanager.rstudio.com doesn't have it
url = "https://mran.microsoft.com/snapshot/{}".format(try_date.isoformat())
url = f"https://mran.microsoft.com/snapshot/{try_date.isoformat()}"
r = requests.head(url)
if r.ok:
return url

View file

@@ -20,7 +20,7 @@ class Dataverse(DoiProvider):
def __init__(self):
data_file = os.path.join(os.path.dirname(__file__), "dataverse.json")
with open(data_file, "r") as fp:
with open(data_file) as fp:
self.hosts = json.load(fp)["installations"]
super().__init__()
@@ -97,7 +97,7 @@ class Dataverse(DoiProvider):
record_id = spec["record"]
host = spec["host"]
yield "Fetching Dataverse record {}.\n".format(record_id)
yield f"Fetching Dataverse record {record_id}.\n"
url = "{}/api/datasets/:persistentId?persistentId={}".format(
host["url"], record_id
)
@@ -114,8 +114,7 @@ class Dataverse(DoiProvider):
file_ref = {"download": file_url, "filename": filename}
fetch_map = {key: key for key in file_ref.keys()}
for line in self.fetch_file(file_ref, fetch_map, output_dir):
yield line
yield from self.fetch_file(file_ref, fetch_map, output_dir)
new_subdirs = os.listdir(output_dir)
# if there is only one new subdirectory move its contents

View file

@@ -23,7 +23,7 @@ class DoiProvider(ContentProvider):
self.session = Session()
self.session.headers.update(
{
"user-agent": "repo2docker {}".format(__version__),
"user-agent": f"repo2docker {__version__}",
}
)
@@ -38,7 +38,7 @@ class DoiProvider(ContentProvider):
if not isinstance(req, request.Request):
req = request.Request(req)
req.add_header("User-Agent", "repo2docker {}".format(__version__))
req.add_header("User-Agent", f"repo2docker {__version__}")
if headers is not None:
for key, value in headers.items():
req.add_header(key, value)
@@ -52,7 +52,7 @@ class DoiProvider(ContentProvider):
doi = normalize_doi(doi)
try:
resp = self._request("https://doi.org/{}".format(doi))
resp = self._request(f"https://doi.org/{doi}")
resp.raise_for_status()
# If the DOI doesn't resolve, just return URL
except HTTPError:
@@ -67,26 +67,26 @@ class DoiProvider(ContentProvider):
# file related to a record
file_url = deep_get(file_ref, host["download"])
fname = deep_get(file_ref, host["filename"])
logging.debug("Downloading file {} as {}\n".format(file_url, fname))
logging.debug(f"Downloading file {file_url} as {fname}\n")
yield "Requesting {}\n".format(file_url)
yield f"Requesting {file_url}\n"
resp = self._request(file_url, stream=True)
resp.raise_for_status()
if path.dirname(fname):
sub_dir = path.join(output_dir, path.dirname(fname))
if not path.exists(sub_dir):
yield "Creating {}\n".format(sub_dir)
yield f"Creating {sub_dir}\n"
makedirs(sub_dir, exist_ok=True)
dst_fname = path.join(output_dir, fname)
with open(dst_fname, "wb") as dst:
yield "Fetching {}\n".format(fname)
yield f"Fetching {fname}\n"
for chunk in resp.iter_content(chunk_size=None):
dst.write(chunk)
if unzip and is_zipfile(dst_fname):
yield "Extracting {}\n".format(fname)
yield f"Extracting {fname}\n"
zfile = ZipFile(dst_fname)
zfile.extractall(path=output_dir)
zfile.close()
@@ -106,4 +106,4 @@ class DoiProvider(ContentProvider):
copytree(path.join(output_dir, d), output_dir)
shutil.rmtree(path.join(output_dir, d))
yield "Fetched files: {}\n".format(os.listdir(output_dir))
yield f"Fetched files: {os.listdir(output_dir)}\n"

View file

@@ -91,10 +91,9 @@ class Figshare(DoiProvider):
only_one_file = len(files) == 1
for file_ref in files:
unzip = file_ref["name"].endswith(".zip") and only_one_file
for line in self.fetch_file(file_ref, host, output_dir, unzip):
yield line
yield from self.fetch_file(file_ref, host, output_dir, unzip)
@property
def content_id(self):
"""The Figshare article ID"""
return "{}.v{}".format(self.article_id, self.article_version)
return f"{self.article_id}.v{self.article_version}"

View file

@@ -29,13 +29,12 @@ class Git(ContentProvider):
# this prevents HEAD's submodules to be cloned if ref doesn't have them
cmd.extend(["--no-checkout"])
cmd.extend([repo, output_dir])
for line in execute_cmd(cmd, capture=yield_output):
yield line
yield from execute_cmd(cmd, capture=yield_output)
except subprocess.CalledProcessError as e:
msg = "Failed to clone repository from {repo}".format(repo=repo)
msg = f"Failed to clone repository from {repo}"
if ref != "HEAD":
msg += " (ref {ref})".format(ref=ref)
msg += f" (ref {ref})"
msg += "."
raise ContentProviderException(msg) from e
@@ -54,23 +53,21 @@ class Git(ContentProvider):
"specifying `--ref`."
)
else:
msg = "Failed to check out ref {}".format(ref)
msg = f"Failed to check out ref {ref}"
raise ValueError(msg)
# We don't need to explicitly checkout things as the reset will
# take care of that. If the hash is resolved above, we should be
# able to reset to it
for line in execute_cmd(
yield from execute_cmd(
["git", "reset", "--hard", hash], cwd=output_dir, capture=yield_output
):
yield line
)
# ensure that git submodules are initialised and updated
for line in execute_cmd(
yield from execute_cmd(
["git", "submodule", "update", "--init", "--recursive"],
cwd=output_dir,
capture=yield_output,
):
yield line
)
cmd = ["git", "rev-parse", "HEAD"]
sha1 = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=output_dir)

View file

@@ -61,7 +61,7 @@ class Hydroshare(DoiProvider):
bag_url = "{}{}".format(host["django_irods"], resource_id)
yield "Downloading {}.\n".format(bag_url)
yield f"Downloading {bag_url}.\n"
# bag downloads are prepared on demand and may need some time
conn = self.urlopen(bag_url)
@@ -82,7 +82,7 @@ class Hydroshare(DoiProvider):
time.sleep(wait_time)
conn = self.urlopen(bag_url)
if conn.status_code != 200:
msg = "Failed to download bag. status code {}.\n".format(conn.status_code)
msg = f"Failed to download bag. status code {conn.status_code}.\n"
yield msg
raise ContentProviderException(msg)
# Bag creation seems to need a small time buffer after it says it's ready.
@@ -102,4 +102,4 @@ class Hydroshare(DoiProvider):
@property
def content_id(self):
"""The HydroShare resource ID"""
return "{}.v{}".format(self.resource_id, self.version)
return f"{self.resource_id}.v{self.version}"

View file

@@ -41,8 +41,7 @@ class Mercurial(ContentProvider):
# don't update so the clone will include an empty working
# directory, the given ref will be updated out later
cmd.extend(["--noupdate"])
for line in execute_cmd(cmd, capture=yield_output):
yield line
yield from execute_cmd(cmd, capture=yield_output)
except subprocess.CalledProcessError as error:
msg = f"Failed to clone repository from {repo}"
@@ -54,17 +53,16 @@ class Mercurial(ContentProvider):
# check out the specific ref given by the user
if ref is not None:
try:
for line in execute_cmd(
yield from execute_cmd(
["hg", "update", "--clean", ref] + args_enabling_topic,
cwd=output_dir,
capture=yield_output,
):
yield line
)
except subprocess.CalledProcessError:
self.log.error(
"Failed to update to ref %s", ref, extra=dict(phase=R2dState.FAILED)
)
raise ValueError("Failed to update to ref {}".format(ref))
raise ValueError(f"Failed to update to ref {ref}")
cmd = ["hg", "identify", "-i"] + args_enabling_topic
sha1 = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=output_dir)

View file

@@ -34,12 +34,12 @@ class Swhid(ContentProvider):
self.session = requests.Session()
self.session.headers.update(
{
"user-agent": "repo2docker {}".format(__version__),
"user-agent": f"repo2docker {__version__}",
}
)
def set_auth_token(self, token):
header = {"Authorization": "Bearer {}".format(token)}
header = {"Authorization": f"Bearer {token}"}
self.session.headers.update(header)
def _request(self, url, method="GET"):
@@ -72,8 +72,8 @@ class Swhid(ContentProvider):
return {"swhid": swhid, "swhid_obj": swhid_dict}
def fetch_directory(self, dir_hash, output_dir):
url = "{}/vault/directory/{}/".format(self.base_url, dir_hash)
yield "Fetching directory {} from {}\n".format(dir_hash, url)
url = f"{self.base_url}/vault/directory/{dir_hash}/"
yield f"Fetching directory {dir_hash} from {url}\n"
resp = self._request(url, "POST")
receipt = resp.json()
status = receipt["status"]
@@ -92,7 +92,7 @@ class Swhid(ContentProvider):
# move its content one level up
copytree(path.join(output_dir, dir_hash), output_dir)
shutil.rmtree(path.join(output_dir, dir_hash))
yield "Fetched files: {}\n".format(os.listdir(output_dir))
yield f"Fetched files: {os.listdir(output_dir)}\n"
def fetch(self, spec, output_dir, yield_output=False):
swhid = spec["swhid"]
@@ -101,12 +101,12 @@ class Swhid(ContentProvider):
if swhid_obj["type"] == "rev":
# need to get the directory for this revision
sha1git = swhid_obj["hash"]
url = "{}/revision/{}/".format(self.base_url, sha1git)
yield "Fetching revision {} from {}\n".format(sha1git, url)
url = f"{self.base_url}/revision/{sha1git}/"
yield f"Fetching revision {sha1git} from {url}\n"
resp = self._request(url)
assert resp.ok, (resp.content, self.session.headers)
directory = resp.json()["directory"]
self.swhid = "swh:1:dir:{}".format(directory)
self.swhid = f"swh:1:dir:{directory}"
yield from self.fetch_directory(directory, output_dir)
elif swhid_obj["type"] == "dir":
self.swhid = swhid

View file

@@ -66,7 +66,7 @@ class Zenodo(DoiProvider):
record_id = spec["record"]
host = spec["host"]
yield "Fetching Zenodo record {}.\n".format(record_id)
yield f"Fetching Zenodo record {record_id}.\n"
resp = self.urlopen(
"{}{}".format(host["api"], record_id),
headers={"accept": "application/json"},
@@ -77,10 +77,7 @@ class Zenodo(DoiProvider):
files = deep_get(record, host["filepath"])
only_one_file = len(files) == 1
for file_ref in files:
for line in self.fetch_file(
file_ref, host, output_dir, unzip=only_one_file
):
yield line
yield from self.fetch_file(file_ref, host, output_dir, unzip=only_one_file)
@property
def content_id(self):

View file

@@ -131,7 +131,7 @@ class Image:
return self._config
def __repr__(self):
return "Image(tags={},config={})".format(self.tags, self.config)
return f"Image(tags={self.tags},config={self.config})"
class ContainerEngine(LoggingConfigurable):

View file

@@ -212,7 +212,7 @@ class Repo2DockerTest(pytest.Function):
err = excinfo.value
if isinstance(err, SystemExit):
cmd = "jupyter-repo2docker %s" % " ".join(map(pipes.quote, self.args))
return "%s | exited with status=%s" % (cmd, err.code)
return f"{cmd} | exited with status={err.code}"
else:
return super().repr_failure(excinfo)

View file

@@ -20,7 +20,7 @@ with open("mem_allocate_mb") as f:
mem_allocate_mb = int(f.read().strip())
size = 1024 * 1024 * mem_allocate_mb
print("trying to allocate {}MB".format(mem_allocate_mb))
print(f"trying to allocate {mem_allocate_mb}MB")
ret = libc.malloc(size)

View file

@@ -20,7 +20,7 @@ with open("mem_allocate_mb") as f:
mem_allocate_mb = int(f.read().strip())
size = 1024 * 1024 * mem_allocate_mb
print("trying to allocate {}MB".format(mem_allocate_mb))
print(f"trying to allocate {mem_allocate_mb}MB")
ret = libc.malloc(size)

View file

@@ -12,8 +12,8 @@ from repo2docker.contentproviders import Dataverse
test_dv = Dataverse()
harvard_dv = next((_ for _ in test_dv.hosts if _["name"] == "Harvard Dataverse"))
cimmyt_dv = next((_ for _ in test_dv.hosts if _["name"] == "CIMMYT Research Data"))
harvard_dv = next(_ for _ in test_dv.hosts if _["name"] == "Harvard Dataverse")
cimmyt_dv = next(_ for _ in test_dv.hosts if _["name"] == "CIMMYT Research Data")
test_hosts = [
(
[
@@ -153,7 +153,7 @@ def test_dataverse_fetch(dv_files, requests_mock):
for l in dv.fetch(spec, d):
output.append(l)
unpacked_files = set(os.listdir(d))
expected = set(["directory", "some-file.txt"])
expected = {"directory", "some-file.txt"}
assert expected == unpacked_files
assert os.path.isfile(
os.path.join(d, "directory", "subdirectory", "the-other-file.txt")

View file

@@ -27,7 +27,7 @@ def test_url_headers(requests_mock):
result = doi.urlopen("https://mybinder.org", headers=headers)
assert "test1" in result.request.headers
assert "Test2" in result.request.headers
assert result.request.headers["User-Agent"] == "repo2docker {}".format(__version__)
assert result.request.headers["User-Agent"] == f"repo2docker {__version__}"
def test_unresolving_doi():

View file

@@ -113,8 +113,8 @@ def test_detect_not_figshare():
def figshare_archive(prefix="a_directory"):
with NamedTemporaryFile(suffix=".zip") as zfile:
with ZipFile(zfile.name, mode="w") as zip:
zip.writestr("{}/some-file.txt".format(prefix), "some content")
zip.writestr("{}/some-other-file.txt".format(prefix), "some more content")
zip.writestr(f"{prefix}/some-file.txt", "some content")
zip.writestr(f"{prefix}/some-other-file.txt", "some more content")
yield zfile.name
@@ -127,7 +127,7 @@ def test_fetch_zip(requests_mock):
{
"name": "afake.zip",
"is_link_only": False,
"download_url": "file://{}".format(fig_path),
"download_url": f"file://{fig_path}",
}
]
}
@@ -135,9 +135,7 @@ def test_fetch_zip(requests_mock):
"https://api.figshare.com/v2/articles/123456/versions/42",
json=mock_response,
)
requests_mock.get(
"file://{}".format(fig_path), content=open(fig_path, "rb").read()
)
requests_mock.get(f"file://{fig_path}", content=open(fig_path, "rb").read())
# with patch.object(Figshare, "urlopen", new=mock_urlopen):
with TemporaryDirectory() as d:
@@ -146,7 +144,7 @@ def test_fetch_zip(requests_mock):
output.append(l)
unpacked_files = set(os.listdir(d))
expected = set(["some-other-file.txt", "some-file.txt"])
expected = {"some-other-file.txt", "some-file.txt"}
assert expected == unpacked_files
@@ -157,12 +155,12 @@ def test_fetch_data(requests_mock):
"files": [
{
"name": "afake.file",
"download_url": "file://{}".format(a_path),
"download_url": f"file://{a_path}",
"is_link_only": False,
},
{
"name": "bfake.data",
"download_url": "file://{}".format(b_path),
"download_url": f"file://{b_path}",
"is_link_only": False,
},
{"name": "cfake.link", "is_link_only": True},
@@ -173,12 +171,8 @@ def test_fetch_data(requests_mock):
"https://api.figshare.com/v2/articles/123456/versions/42",
json=mock_response,
)
requests_mock.get(
"file://{}".format(a_path), content=open(a_path, "rb").read()
)
requests_mock.get(
"file://{}".format(b_path), content=open(b_path, "rb").read()
)
requests_mock.get(f"file://{a_path}", content=open(a_path, "rb").read())
requests_mock.get(f"file://{b_path}", content=open(b_path, "rb").read())
with TemporaryDirectory() as d:
output = []

View file

@@ -103,8 +103,8 @@ def test_detect_hydroshare(requests_mock):
def hydroshare_archive(prefix="b8f6eae9d89241cf8b5904033460af61/data/contents"):
with NamedTemporaryFile(suffix=".zip") as zfile:
with ZipFile(zfile.name, mode="w") as zip:
zip.writestr("{}/some-file.txt".format(prefix), "some content")
zip.writestr("{}/some-other-file.txt".format(prefix), "some more content")
zip.writestr(f"{prefix}/some-file.txt", "some content")
zip.writestr(f"{prefix}/some-other-file.txt", "some more content")
yield zfile
@@ -149,7 +149,7 @@ def test_fetch_bag():
output.append(l)
unpacked_files = set(os.listdir(d))
expected = set(["some-other-file.txt", "some-file.txt"])
expected = {"some-other-file.txt", "some-file.txt"}
assert expected == unpacked_files

View file

@@ -99,7 +99,7 @@ def mocked_provider(tmpdir, dirhash, tarfile_buf):
adapter.register_uri(
"GET",
"mock://api/1/revision/{}/".format(NULLID),
f"mock://api/1/revision/{NULLID}/",
json={
"author": {"fullname": "John Doe <jdoe@example.com>"},
"directory": dirhash,
@@ -107,25 +107,25 @@ def mocked_provider(tmpdir, dirhash, tarfile_buf):
)
adapter.register_uri(
"POST",
"mock://api/1/vault/directory/{}/".format(dirhash),
f"mock://api/1/vault/directory/{dirhash}/",
json={
"fetch_url": "mock://api/1/vault/directory/{}/raw/".format(dirhash),
"fetch_url": f"mock://api/1/vault/directory/{dirhash}/raw/",
"status": "new",
},
)
adapter.register_uri(
"GET",
"mock://api/1/vault/directory/{}/".format(dirhash),
f"mock://api/1/vault/directory/{dirhash}/",
[
{
"json": {
"fetch_url": "mock://api/1/vault/directory/{}/raw/".format(dirhash),
"fetch_url": f"mock://api/1/vault/directory/{dirhash}/raw/",
"status": "pending",
}
},
{
"json": {
"fetch_url": "mock://api/1/vault/directory/{}/raw/".format(dirhash),
"fetch_url": f"mock://api/1/vault/directory/{dirhash}/raw/",
"status": "done",
}
},
@@ -133,7 +133,7 @@ def mocked_provider(tmpdir, dirhash, tarfile_buf):
)
adapter.register_uri(
"GET",
"mock://api/1/vault/directory/{}/raw/".format(dirhash),
f"mock://api/1/vault/directory/{dirhash}/raw/",
content=tarfile_buf,
)
return provider

View file

@@ -82,8 +82,8 @@ def test_detect_zenodo(test_input, expected, requests_mock):
def zenodo_archive(prefix="a_directory"):
with NamedTemporaryFile(suffix=".zip") as zfile:
with ZipFile(zfile.name, mode="w") as zip:
zip.writestr("{}/some-file.txt".format(prefix), "some content")
zip.writestr("{}/some-other-file.txt".format(prefix), "some more content")
zip.writestr(f"{prefix}/some-file.txt", "some content")
zip.writestr(f"{prefix}/some-other-file.txt", "some more content")
yield zfile.name
@@ -96,15 +96,13 @@ def test_fetch_software_from_github_archive(requests_mock):
"files": [
{
"filename": "some_dir/afake.zip",
"links": {"download": "file://{}".format(zen_path)},
"links": {"download": f"file://{zen_path}"},
}
],
"metadata": {"upload_type": "other"},
}
requests_mock.get("https://zenodo.org/api/records/1234", json=mock_response)
requests_mock.get(
"file://{}".format(zen_path), content=open(zen_path, "rb").read()
)
requests_mock.get(f"file://{zen_path}", content=open(zen_path, "rb").read())
zen = Zenodo()
spec = {"host": test_zen.hosts[1], "record": "1234"}
@@ -115,7 +113,7 @@ def test_fetch_software_from_github_archive(requests_mock):
output.append(l)
unpacked_files = set(os.listdir(d))
expected = set(["some-other-file.txt", "some-file.txt"])
expected = {"some-other-file.txt", "some-file.txt"}
assert expected == unpacked_files
@@ -129,15 +127,13 @@ def test_fetch_software(requests_mock):
# this is the difference to the GitHub generated one,
# the ZIP file isn't in a directory
"filename": "afake.zip",
"links": {"download": "file://{}".format(zen_path)},
"links": {"download": f"file://{zen_path}"},
}
],
"metadata": {"upload_type": "software"},
}
requests_mock.get("https://zenodo.org/api/records/1234", json=mock_response)
requests_mock.get(
"file://{}".format(zen_path), content=open(zen_path, "rb").read()
)
requests_mock.get(f"file://{zen_path}", content=open(zen_path, "rb").read())
with TemporaryDirectory() as d:
zen = Zenodo()
@@ -147,7 +143,7 @@ def test_fetch_software(requests_mock):
output.append(l)
unpacked_files = set(os.listdir(d))
expected = set(["some-other-file.txt", "some-file.txt"])
expected = {"some-other-file.txt", "some-file.txt"}
assert expected == unpacked_files
@@ -159,21 +155,21 @@ def test_fetch_data(requests_mock):
"files": [
{
"filename": "afake.zip",
"links": {"download": "file://{}".format(a_zen_path)},
"links": {"download": f"file://{a_zen_path}"},
},
{
"filename": "bfake.zip",
"links": {"download": "file://{}".format(b_zen_path)},
"links": {"download": f"file://{b_zen_path}"},
},
],
"metadata": {"upload_type": "data"},
}
requests_mock.get("https://zenodo.org/api/records/1234", json=mock_response)
requests_mock.get(
"file://{}".format(a_zen_path), content=open(a_zen_path, "rb").read()
f"file://{a_zen_path}", content=open(a_zen_path, "rb").read()
)
requests_mock.get(
"file://{}".format(b_zen_path), content=open(b_zen_path, "rb").read()
f"file://{b_zen_path}", content=open(b_zen_path, "rb").read()
)
with TemporaryDirectory() as d:

View file

@@ -13,7 +13,7 @@ def test_version(capsys):
"""
with pytest.raises(SystemExit):
make_r2d(["--version"])
assert capsys.readouterr().out == "{}\n".format(__version__)
assert capsys.readouterr().out == f"{__version__}\n"
def test_simple():

View file

@@ -40,8 +40,8 @@ def test_connect_url(tmpdir):
app.start()
container = app.start_container()
container_url = "http://{}:{}/api".format(app.hostname, app.port)
expected_url = "http://{}:{}".format(app.hostname, app.port)
container_url = f"http://{app.hostname}:{app.port}/api"
expected_url = f"http://{app.hostname}:{app.port}"
# wait a bit for the container to be ready
# give the container a chance to start

View file

@@ -30,7 +30,7 @@ def test_env(capfd):
"repo2docker",
# 'key=value' are exported as is in docker
"-e",
"FOO={}".format(ts),
f"FOO={ts}",
"--env",
"BAR=baz",
# 'key' is exported with the currently exported value
@@ -65,7 +65,7 @@ def test_env(capfd):
# stderr should contain lines of output
declares = [x for x in captured.err.splitlines() if x.startswith("declare")]
assert 'declare -x FOO="{}"'.format(ts) in declares
assert f'declare -x FOO="{ts}"' in declares
assert 'declare -x BAR="baz"' in declares
assert 'declare -x SPAM="eggs"' in declares
assert "declare -x NO_SPAM" not in declares

View file

@@ -82,13 +82,13 @@ def read_port_mapping_response(
if all_ports:
port = port_mapping["8888/tcp"][0]["HostPort"]
url = "http://{}:{}".format(host, port)
url = f"http://{host}:{port}"
for i in range(5):
try:
r = requests.get(url)
r.raise_for_status()
except Exception as e:
print("No response from {}: {}".format(url, e))
print(f"No response from {url}: {e}")
container.reload()
assert container.status == "running"
time.sleep(3)

View file

@@ -87,6 +87,7 @@ def test_snapshot_mran_date(requested, expected):
with patch("requests.head", side_effect=mock_request_head):
r = buildpacks.RBuildPack()
assert r.get_mran_snapshot_url(
requested
) == "https://mran.microsoft.com/snapshot/{}".format(expected.isoformat())
assert (
r.get_mran_snapshot_url(requested)
== f"https://mran.microsoft.com/snapshot/{expected.isoformat()}"
)

View file

@@ -35,7 +35,7 @@ def test_user():
[
"repo2docker",
"-v",
"{}:/home/{}".format(tmpdir, username),
f"{tmpdir}:/home/{username}",
"--user-id",
userid,
"--user-name",
@@ -53,7 +53,7 @@ def test_user():
with open(os.path.join(tmpdir, "id")) as f:
assert f.read().strip() == userid
with open(os.path.join(tmpdir, "pwd")) as f:
assert f.read().strip() == "/home/{}".format(username)
assert f.read().strip() == f"/home/{username}"
with open(os.path.join(tmpdir, "name")) as f:
assert f.read().strip() == username
with open(os.path.join(tmpdir, "name")) as f:

View file

@@ -90,7 +90,7 @@ def test_invalid_port_mapping(port_spec):
with pytest.raises(ValueError) as e:
utils.validate_and_generate_port_mapping([port_spec])
assert 'Port specification "{}"'.format(port_spec) in str(e.value)
assert f'Port specification "{port_spec}"' in str(e.value)
def test_deep_get():

View file

@@ -22,7 +22,7 @@ def test_volume_abspath():
[
"repo2docker",
"-v",
"{}:/home/{}".format(tmpdir, username),
f"{tmpdir}:/home/{username}",
"--user-id",
str(os.geteuid()),
"--user-name",
@@ -31,7 +31,7 @@ def test_volume_abspath():
"--",
"/bin/bash",
"-c",
"echo -n {} > ts".format(ts),
f"echo -n {ts} > ts",
]
)
@@ -61,7 +61,7 @@ def test_volume_relpath():
"--",
"/bin/bash",
"-c",
"echo -n {} > ts".format(ts),
f"echo -n {ts} > ts",
]
)

View file

@@ -275,7 +275,6 @@ https://creativecommons.org/publicdomain/zero/1.0/ .
"""
from __future__ import print_function
try:
import configparser
@@ -344,7 +343,7 @@ def get_config_from_root(root):
# the top of versioneer.py for instructions on writing your setup.cfg .
setup_cfg = os.path.join(root, "setup.cfg")
parser = configparser.SafeConfigParser()
with open(setup_cfg, "r") as f:
with open(setup_cfg) as f:
parser.readfp(f)
VCS = parser.get("versioneer", "VCS") # mandatory
@@ -404,7 +403,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=
stderr=(subprocess.PIPE if hide_stderr else None),
)
break
except EnvironmentError:
except OSError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
@@ -414,7 +413,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=
return None, None
else:
if verbose:
print("unable to find command, tried %s" % (commands,))
print(f"unable to find command, tried {commands}")
return None, None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
@@ -429,7 +428,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=
LONG_VERSION_PY[
"git"
] = '''
] = r'''
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
@@ -961,7 +960,7 @@ def git_get_keywords(versionfile_abs):
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
f = open(versionfile_abs)
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
@@ -976,7 +975,7 @@ def git_get_keywords(versionfile_abs):
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError:
except OSError:
pass
return keywords
@@ -1000,11 +999,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
refs = {r.strip() for r in refnames.strip("()").split(",")}
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)])
tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)}
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
@@ -1013,7 +1012,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r"\d", r)])
tags = {r for r in refs if re.search(r"\d", r)}
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
@@ -1116,7 +1115,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (
pieces["error"] = "tag '{}' doesn't start with prefix '{}'".format(
full_tag,
tag_prefix,
)
@@ -1166,13 +1165,13 @@ def do_vcs_install(manifest_in, versionfile_source, ipy):
files.append(versioneer_file)
present = False
try:
f = open(".gitattributes", "r")
f = open(".gitattributes")
for line in f.readlines():
if line.strip().startswith(versionfile_source):
if "export-subst" in line.strip().split()[1:]:
present = True
f.close()
except EnvironmentError:
except OSError:
pass
if not present:
f = open(".gitattributes", "a+")
@@ -1236,7 +1235,7 @@ def versions_from_file(filename):
try:
with open(filename) as f:
contents = f.read()
except EnvironmentError:
except OSError:
raise NotThisMethod("unable to read _version.py")
mo = re.search(
r"version_json = '''\n(.*)''' # END VERSION_JSON", contents, re.M | re.S
@@ -1257,7 +1256,7 @@ def write_to_version_file(filename, versions):
with open(filename, "w") as f:
f.write(SHORT_VERSION_PY % contents)
print("set %s to '%s'" % (filename, versions["version"]))
print("set {} to '{}'".format(filename, versions["version"]))
def plus_or_dot(pieces):
@@ -1482,7 +1481,7 @@ def get_versions(verbose=False):
try:
ver = versions_from_file(versionfile_abs)
if verbose:
print("got version from file %s %s" % (versionfile_abs, ver))
print(f"got version from file {versionfile_abs} {ver}")
return ver
except NotThisMethod:
pass
@@ -1755,11 +1754,7 @@ def do_setup():
root = get_root()
try:
cfg = get_config_from_root(root)
except (
EnvironmentError,
configparser.NoSectionError,
configparser.NoOptionError,
) as e:
except (OSError, configparser.NoSectionError, configparser.NoOptionError) as e:
if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
print("Adding sample versioneer config to setup.cfg", file=sys.stderr)
with open(os.path.join(root, "setup.cfg"), "a") as f:
@@ -1784,9 +1779,9 @@ def do_setup():
ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py")
if os.path.exists(ipy):
try:
with open(ipy, "r") as f:
with open(ipy) as f:
old = f.read()
except EnvironmentError:
except OSError:
old = ""
if INIT_PY_SNIPPET not in old:
print(" appending to %s" % ipy)
@@ -1805,12 +1800,12 @@ def do_setup():
manifest_in = os.path.join(root, "MANIFEST.in")
simple_includes = set()
try:
with open(manifest_in, "r") as f:
with open(manifest_in) as f:
for line in f:
if line.startswith("include "):
for include in line.split()[1:]:
simple_includes.add(include)
except EnvironmentError:
except OSError:
pass
# That doesn't cover everything MANIFEST.in can do
# (http://docs.python.org/2/distutils/sourcedist.html#commands), so
@@ -1844,7 +1839,7 @@ def scan_setup_py():
found = set()
setters = False
errors = 0
with open("setup.py", "r") as f:
with open("setup.py") as f:
for line in f.readlines():
if "import versioneer" in line:
found.add("import")
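One change in this file worth flagging: the LONG_VERSION_PY["git"] template above became a raw string (r'''). The embedded module text contains backslash sequences such as \d inside regexes; in a plain string literal Python 3 flags invalid escapes with a DeprecationWarning, while a raw string keeps them literal. A minimal sketch:

import re

plain = "\\d+"  # a plain string must double the backslash
raw = r"\d+"    # a raw string keeps the regex readable
assert plain == raw
assert re.fullmatch(raw, "2022")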