Use context manager instead of plain open (#1211)

Using open as a context manager ensures the files are closed after use.
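
For illustration, the general shape of the change (a sketch using a made-up data.json, not a specific file from this PR):

    # Before: the file object is closed only when it happens to be garbage collected
    data = open("data.json").read()

    # After: the with block closes the file as soon as the read finishes
    with open("data.json") as fp:
        data = fp.read()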

When the object is already a pathlib.Path, I used the read_text /
write_text functions instead.
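
A sketch of that pattern, assuming a settings.json file on disk:

    import json
    import pathlib

    path = pathlib.Path("settings.json")
    # read_text and write_text open and close the file internally
    config = json.loads(path.read_text())
    path.write_text(json.dumps(config, indent=2))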

In some cases pathlib.Path.open was already used as a context manager;
it is basically the same as the builtin open.
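
A minimal sketch of that equivalence (notes.txt is a made-up example file):

    import pathlib

    # Path.open returns the same kind of file object as the builtin open,
    # so it works as a context manager in exactly the same way
    with pathlib.Path("notes.txt").open() as fp:
        content = fp.read()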

Thanks, Konstantin Baikov!
pull/1229/head
Konstantin Baikov 2021-03-11 17:15:49 +01:00 committed by GitHub
parent a1bcd2fbe5
commit 8e18c79431
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
17 changed files with 93 additions and 63 deletions


@@ -212,7 +212,7 @@ class Datasette:
             and (config_dir / "inspect-data.json").exists()
             and not inspect_data
         ):
-            inspect_data = json.load((config_dir / "inspect-data.json").open())
+            inspect_data = json.loads((config_dir / "inspect-data.json").read_text())
         if immutables is None:
             immutable_filenames = [i["file"] for i in inspect_data.values()]
             immutables = [
@@ -269,7 +269,7 @@ class Datasette:
         if config_dir and (config_dir / "config.json").exists():
             raise StartupError("config.json should be renamed to settings.json")
         if config_dir and (config_dir / "settings.json").exists() and not config:
-            config = json.load((config_dir / "settings.json").open())
+            config = json.loads((config_dir / "settings.json").read_text())
         self._settings = dict(DEFAULT_SETTINGS, **(config or {}))
         self.renderers = {}  # File extension -> (renderer, can_render) functions
         self.version_note = version_note
@@ -450,11 +450,10 @@ class Datasette:
     def app_css_hash(self):
         if not hasattr(self, "_app_css_hash"):
-            self._app_css_hash = hashlib.sha1(
-                open(os.path.join(str(app_root), "datasette/static/app.css"))
-                .read()
-                .encode("utf8")
-            ).hexdigest()[:6]
+            with open(os.path.join(str(app_root), "datasette/static/app.css")) as fp:
+                self._app_css_hash = hashlib.sha1(fp.read().encode("utf8")).hexdigest()[
+                    :6
+                ]
         return self._app_css_hash
 
     async def get_canned_queries(self, database_name, actor):


@@ -125,13 +125,13 @@ def cli():
 @sqlite_extensions
 def inspect(files, inspect_file, sqlite_extensions):
     app = Datasette([], immutables=files, sqlite_extensions=sqlite_extensions)
-    if inspect_file == "-":
-        out = sys.stdout
-    else:
-        out = open(inspect_file, "w")
     loop = asyncio.get_event_loop()
     inspect_data = loop.run_until_complete(inspect_(files, sqlite_extensions))
-    out.write(json.dumps(inspect_data, indent=2))
+    if inspect_file == "-":
+        sys.stdout.write(json.dumps(inspect_data, indent=2))
+    else:
+        with open(inspect_file, "w") as fp:
+            fp.write(json.dumps(inspect_data, indent=2))
 
 
 async def inspect_(files, sqlite_extensions):
@@ -475,7 +475,8 @@ def serve(
     inspect_data = None
     if inspect_file:
-        inspect_data = json.load(open(inspect_file))
+        with open(inspect_file) as fp:
+            inspect_data = json.load(fp)
 
     metadata_data = None
     if metadata:


@@ -141,9 +141,11 @@ def publish_subcommand(publish):
         if show_files:
             if os.path.exists("metadata.json"):
                 print("=== metadata.json ===\n")
-                print(open("metadata.json").read())
+                with open("metadata.json") as fp:
+                    print(fp.read())
             print("\n==== Dockerfile ====\n")
-            print(open("Dockerfile").read())
+            with open("Dockerfile") as fp:
+                print(fp.read())
             print("\n====================\n")
 
         image_id = f"gcr.io/{project}/{name}"


@@ -171,9 +171,11 @@ def temporary_heroku_directory(
         os.chdir(tmp.name)
 
         if metadata_content:
-            open("metadata.json", "w").write(json.dumps(metadata_content, indent=2))
+            with open("metadata.json", "w") as fp:
+                fp.write(json.dumps(metadata_content, indent=2))
 
-        open("runtime.txt", "w").write("python-3.8.7")
+        with open("runtime.txt", "w") as fp:
+            fp.write("python-3.8.7")
 
         if branch:
             install = [
@@ -182,11 +184,11 @@
         else:
             install = ["datasette"] + list(install)
 
-        open("requirements.txt", "w").write("\n".join(install))
+        with open("requirements.txt", "w") as fp:
+            fp.write("\n".join(install))
         os.mkdir("bin")
-        open("bin/post_compile", "w").write(
-            "datasette inspect --inspect-file inspect-data.json"
-        )
+        with open("bin/post_compile", "w") as fp:
+            fp.write("datasette inspect --inspect-file inspect-data.json")
 
         extras = []
         if template_dir:
@@ -218,7 +220,8 @@
         procfile_cmd = "web: datasette serve --host 0.0.0.0 {quoted_files} --cors --port $PORT --inspect-file inspect-data.json {extras}".format(
             quoted_files=quoted_files, extras=" ".join(extras)
         )
-        open("Procfile", "w").write(procfile_cmd)
+        with open("Procfile", "w") as fp:
+            fp.write(procfile_cmd)
 
         for path, filename in zip(file_paths, file_names):
             link_or_copy(path, os.path.join(tmp.name, filename))


@@ -428,8 +428,10 @@ def temporary_docker_directory(
         )
         os.chdir(datasette_dir)
         if metadata_content:
-            open("metadata.json", "w").write(json.dumps(metadata_content, indent=2))
-        open("Dockerfile", "w").write(dockerfile)
+            with open("metadata.json", "w") as fp:
+                fp.write(json.dumps(metadata_content, indent=2))
+        with open("Dockerfile", "w") as fp:
+            fp.write(dockerfile)
         for path, filename in zip(file_paths, file_names):
             link_or_copy(path, os.path.join(datasette_dir, filename))
         if template_dir:


@@ -17,7 +17,8 @@ def get_version():
         os.path.dirname(os.path.abspath(__file__)), "datasette", "version.py"
     )
     g = {}
-    exec(open(path).read(), g)
+    with open(path) as fp:
+        exec(fp.read(), g)
     return g["__version__"]


@@ -75,10 +75,8 @@ def check_permission_actions_are_documented():
     from datasette.plugins import pm
 
     content = (
-        (pathlib.Path(__file__).parent.parent / "docs" / "authentication.rst")
-        .open()
-        .read()
-    )
+        pathlib.Path(__file__).parent.parent / "docs" / "authentication.rst"
+    ).read_text()
     permissions_re = re.compile(r"\.\. _permissions_([^\s:]+):")
     documented_permission_actions = set(permissions_re.findall(content)).union(
         UNDOCUMENTED_PERMISSIONS


@@ -789,7 +789,8 @@ def cli(db_filename, metadata, plugins_path, recreate, extra_db_filename):
     conn.executescript(GENERATED_COLUMNS_SQL)
     print(f"Test tables written to {db_filename}")
     if metadata:
-        open(metadata, "w").write(json.dumps(METADATA, indent=4))
+        with open(metadata, "w") as fp:
+            fp.write(json.dumps(METADATA, indent=4))
         print(f"- metadata written to {metadata}")
     if plugins_path:
         path = pathlib.Path(plugins_path)
@@ -798,7 +799,7 @@ def cli(db_filename, metadata, plugins_path, recreate, extra_db_filename):
         test_plugins = pathlib.Path(__file__).parent / "plugins"
         for filepath in test_plugins.glob("*.py"):
             newpath = path / filepath.name
-            newpath.write_text(filepath.open().read())
+            newpath.write_text(filepath.read_text())
             print(f" Wrote plugin: {newpath}")
     if extra_db_filename:
         if pathlib.Path(extra_db_filename).exists():


@@ -49,7 +49,8 @@ def test_inspect_cli_writes_to_file(app_client):
         cli, ["inspect", "fixtures.db", "--inspect-file", "foo.json"]
     )
     assert 0 == result.exit_code, result.output
-    data = json.load(open("foo.json"))
+    with open("foo.json") as fp:
+        data = json.load(fp)
     assert ["fixtures"] == list(data.keys())


@@ -14,7 +14,8 @@ def test_serve_with_get(tmp_path_factory):
         @hookimpl
         def startup(datasette):
-            open("{}", "w").write("hello")
+            with open("{}", "w") as fp:
+                fp.write("hello")
         """.format(
             str(plugins_dir / "hello.txt")
         ),


@@ -19,13 +19,13 @@ def get_headings(content, underline="-"):
 
 def get_labels(filename):
-    content = (docs_path / filename).open().read()
+    content = (docs_path / filename).read_text()
     return set(label_re.findall(content))
 
 
 @pytest.fixture(scope="session")
 def settings_headings():
-    return get_headings((docs_path / "settings.rst").open().read(), "~")
+    return get_headings((docs_path / "settings.rst").read_text(), "~")
 
 
 @pytest.mark.parametrize("setting", app.SETTINGS)
@@ -43,7 +43,7 @@ def test_settings_are_documented(settings_headings, setting):
     ),
 )
 def test_help_includes(name, filename):
-    expected = open(str(docs_path / filename)).read()
+    expected = (docs_path / filename).read_text()
     runner = CliRunner()
     result = runner.invoke(cli, name.split() + ["--help"], terminal_width=88)
     actual = f"$ datasette {name} --help\n\n{result.output}"
@@ -55,7 +55,7 @@
 @pytest.fixture(scope="session")
 def plugin_hooks_content():
-    return (docs_path / "plugin_hooks.rst").open().read()
+    return (docs_path / "plugin_hooks.rst").read_text()
 
 
 @pytest.mark.parametrize(


@@ -32,7 +32,8 @@ def test_package(mock_call, mock_which):
     capture = CaptureDockerfile()
     mock_call.side_effect = capture
     with runner.isolated_filesystem():
-        open("test.db", "w").write("data")
+        with open("test.db", "w") as fp:
+            fp.write("data")
         result = runner.invoke(cli.cli, ["package", "test.db", "--secret", "sekrit"])
         assert 0 == result.exit_code
         mock_call.assert_has_calls([mock.call(["docker", "build", "."])])
@@ -47,7 +48,8 @@ def test_package_with_port(mock_call, mock_which):
     mock_call.side_effect = capture
     runner = CliRunner()
     with runner.isolated_filesystem():
-        open("test.db", "w").write("data")
+        with open("test.db", "w") as fp:
+            fp.write("data")
         result = runner.invoke(
             cli.cli, ["package", "test.db", "-p", "8080", "--secret", "sekrit"]
         )


@@ -225,7 +225,8 @@ def test_plugin_config_env_from_list(app_client):
 def test_plugin_config_file(app_client):
-    open(TEMP_PLUGIN_SECRET_FILE, "w").write("FROM_FILE")
+    with open(TEMP_PLUGIN_SECRET_FILE, "w") as fp:
+        fp.write("FROM_FILE")
     assert {"foo": "FROM_FILE"} == app_client.ds.plugin_config("file-plugin")
     # Ensure secrets aren't visible in /-/metadata.json
     metadata = app_client.get("/-/metadata.json")


@@ -11,7 +11,8 @@ def test_publish_cloudrun_requires_gcloud(mock_which):
     mock_which.return_value = False
     runner = CliRunner()
     with runner.isolated_filesystem():
-        open("test.db", "w").write("data")
+        with open("test.db", "w") as fp:
+            fp.write("data")
         result = runner.invoke(cli.cli, ["publish", "cloudrun", "test.db"])
         assert result.exit_code == 1
         assert "Publishing to Google Cloud requires gcloud" in result.output
@@ -40,7 +41,8 @@ def test_publish_cloudrun_prompts_for_service(
     mock_which.return_value = True
     runner = CliRunner()
     with runner.isolated_filesystem():
-        open("test.db", "w").write("data")
+        with open("test.db", "w") as fp:
+            fp.write("data")
         result = runner.invoke(
             cli.cli, ["publish", "cloudrun", "test.db"], input="input-service"
         )
@@ -81,7 +83,8 @@ def test_publish_cloudrun(mock_call, mock_output, mock_which):
     mock_which.return_value = True
     runner = CliRunner()
     with runner.isolated_filesystem():
-        open("test.db", "w").write("data")
+        with open("test.db", "w") as fp:
+            fp.write("data")
         result = runner.invoke(
             cli.cli, ["publish", "cloudrun", "test.db", "--service", "test"]
         )
@@ -120,7 +123,8 @@ def test_publish_cloudrun_memory(
     mock_which.return_value = True
     runner = CliRunner()
     with runner.isolated_filesystem():
-        open("test.db", "w").write("data")
+        with open("test.db", "w") as fp:
+            fp.write("data")
         result = runner.invoke(
             cli.cli,
             ["publish", "cloudrun", "test.db", "--service", "test", "--memory", memory],
@@ -152,17 +156,19 @@ def test_publish_cloudrun_plugin_secrets(mock_call, mock_output, mock_which):
     runner = CliRunner()
     with runner.isolated_filesystem():
-        open("test.db", "w").write("data")
-        open("metadata.yml", "w").write(
-            textwrap.dedent(
-                """
+        with open("test.db", "w") as fp:
+            fp.write("data")
+        with open("metadata.yml", "w") as fp:
+            fp.write(
+                textwrap.dedent(
+                    """
     title: Hello from metadata YAML
     plugins:
       datasette-auth-github:
         foo: bar
     """
-            ).strip()
-        )
+                ).strip()
+            )
         result = runner.invoke(
             cli.cli,
             [
@@ -228,7 +234,8 @@ def test_publish_cloudrun_apt_get_install(mock_call, mock_output, mock_which):
     runner = CliRunner()
     with runner.isolated_filesystem():
-        open("test.db", "w").write("data")
+        with open("test.db", "w") as fp:
+            fp.write("data")
         result = runner.invoke(
             cli.cli,
             [
@@ -295,7 +302,8 @@ def test_publish_cloudrun_extra_options(
     runner = CliRunner()
     with runner.isolated_filesystem():
-        open("test.db", "w").write("data")
+        with open("test.db", "w") as fp:
+            fp.write("data")
         result = runner.invoke(
             cli.cli,
             [


@@ -8,7 +8,8 @@ def test_publish_heroku_requires_heroku(mock_which):
     mock_which.return_value = False
     runner = CliRunner()
     with runner.isolated_filesystem():
-        open("test.db", "w").write("data")
+        with open("test.db", "w") as fp:
+            fp.write("data")
         result = runner.invoke(cli.cli, ["publish", "heroku", "test.db"])
         assert result.exit_code == 1
         assert "Publishing to Heroku requires heroku" in result.output
@@ -22,7 +23,8 @@ def test_publish_heroku_installs_plugin(mock_call, mock_check_output, mock_which):
     mock_check_output.side_effect = lambda s: {"['heroku', 'plugins']": b""}[repr(s)]
     runner = CliRunner()
     with runner.isolated_filesystem():
-        open("t.db", "w").write("data")
+        with open("t.db", "w") as fp:
+            fp.write("data")
         result = runner.invoke(cli.cli, ["publish", "heroku", "t.db"], input="y\n")
         assert 0 != result.exit_code
         mock_check_output.assert_has_calls(
@@ -54,7 +56,8 @@ def test_publish_heroku(mock_call, mock_check_output, mock_which):
     }[repr(s)]
     runner = CliRunner()
     with runner.isolated_filesystem():
-        open("test.db", "w").write("data")
+        with open("test.db", "w") as fp:
+            fp.write("data")
         result = runner.invoke(
             cli.cli, ["publish", "heroku", "test.db", "--tar", "gtar"]
         )
@@ -88,7 +91,8 @@ def test_publish_heroku_plugin_secrets(mock_call, mock_check_output, mock_which):
     }[repr(s)]
     runner = CliRunner()
     with runner.isolated_filesystem():
-        open("test.db", "w").write("data")
+        with open("test.db", "w") as fp:
+            fp.write("data")
         result = runner.invoke(
             cli.cli,
             [


@@ -232,7 +232,8 @@ def test_to_css_class(s, expected):
 def test_temporary_docker_directory_uses_hard_link():
     with tempfile.TemporaryDirectory() as td:
         os.chdir(td)
-        open("hello", "w").write("world")
+        with open("hello", "w") as fp:
+            fp.write("world")
         # Default usage of this should use symlink
         with utils.temporary_docker_directory(
             files=["hello"],
@@ -249,7 +250,8 @@ def test_temporary_docker_directory_uses_hard_link():
             secret="secret",
         ) as temp_docker:
             hello = os.path.join(temp_docker, "hello")
-            assert "world" == open(hello).read()
+            with open(hello) as fp:
+                assert "world" == fp.read()
             # It should be a hard link
             assert 2 == os.stat(hello).st_nlink
@@ -260,7 +262,8 @@ def test_temporary_docker_directory_uses_copy_if_hard_link_fails(mock_link):
     mock_link.side_effect = OSError
     with tempfile.TemporaryDirectory() as td:
         os.chdir(td)
-        open("hello", "w").write("world")
+        with open("hello", "w") as fp:
+            fp.write("world")
         # Default usage of this should use symlink
         with utils.temporary_docker_directory(
             files=["hello"],
@@ -277,7 +280,8 @@ def test_temporary_docker_directory_uses_copy_if_hard_link_fails(mock_link):
             secret=None,
         ) as temp_docker:
             hello = os.path.join(temp_docker, "hello")
-            assert "world" == open(hello).read()
+            with open(hello) as fp:
+                assert "world" == fp.read()
             # It should be a copy, not a hard link
             assert 1 == os.stat(hello).st_nlink
@@ -285,7 +289,8 @@
 def test_temporary_docker_directory_quotes_args():
     with tempfile.TemporaryDirectory() as td:
         os.chdir(td)
-        open("hello", "w").write("world")
+        with open("hello", "w") as fp:
+            fp.write("world")
         with utils.temporary_docker_directory(
             files=["hello"],
             name="t",
@@ -301,7 +306,8 @@
             secret="secret",
         ) as temp_docker:
             df = os.path.join(temp_docker, "Dockerfile")
-            df_contents = open(df).read()
+            with open(df) as fp:
+                df_contents = fp.read()
             assert "'$PWD'" in df_contents
             assert "'--$HOME'" in df_contents
             assert "ENV DATASETTE_SECRET 'secret'" in df_contents


@@ -18,7 +18,7 @@ def update_help_includes():
         result = runner.invoke(cli, name.split() + ["--help"], terminal_width=88)
         actual = f"$ datasette {name} --help\n\n{result.output}"
         actual = actual.replace("Usage: cli ", "Usage: datasette ")
-        open(docs_path / filename, "w").write(actual)
+        (docs_path / filename).write_text(actual)
 
 
 if __name__ == "__main__":