Mirror of https://github.com/simonw/datasette
Modernize code to Python 3.6+ (#1158)
* Compact dict and set building
* Remove redundant parentheses
* Simplify chained conditions
* Change method name to lowercase
* Use triple double quotes for docstrings

Thanks, @eumiro!

pull/1159/head
parent
90eba4c3ca
commit
a882d67962
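
The commit applies five small modernizations over and over; here is a minimal runnable sketch of each before/after idiom (illustrative only, not lifted verbatim from the diff):

    pairs = [(b"Content-Type", b"text/html")]

    # Compact dict building: dict([(k, v) ...]) becomes a dict comprehension
    headers = {k.decode().lower(): v.decode() for k, v in pairs}

    # Compact set building: set(generator) becomes a set comprehension
    formats = {name for name in ["json", "csv", "json"]}

    # Simplified chained condition: `n and n > 1 and n < limit` becomes `1 < n < limit`
    n, limit = 5, 10
    assert (1 < n < limit) == bool(n and n > 1 and n < limit)

    # Redundant parentheses: `except (ValueError) as e` is just `except ValueError as e`
    try:
        int("x")
    except ValueError:
        pass

    # PEP 257 style: triple double quotes, even for one-line docstrings
    def demo():
        """One-line docstring."""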
@@ -429,7 +429,7 @@ class Datasette:
         return m

     def plugin_config(self, plugin_name, database=None, table=None, fallback=True):
-        "Return config for plugin, falling back from specified database/table"
+        """Return config for plugin, falling back from specified database/table"""
         plugins = self.metadata(
             "plugins", database=database, table=table, fallback=fallback
         )
@@ -523,7 +523,7 @@ class Datasette:
         return []

     async def permission_allowed(self, actor, action, resource=None, default=False):
-        "Check permissions using the permissions_allowed plugin hook"
+        """Check permissions using the permissions_allowed plugin hook"""
         result = None
         for check in pm.hook.permission_allowed(
             datasette=self,
@@ -570,7 +570,7 @@ class Datasette:
         )

     async def expand_foreign_keys(self, database, table, column, values):
-        "Returns dict mapping (column, value) -> label"
+        """Returns dict mapping (column, value) -> label"""
         labeled_fks = {}
         db = self.databases[database]
         foreign_keys = await db.foreign_keys_for_table(table)
@@ -613,7 +613,7 @@ class Datasette:
         return url

     def _register_custom_units(self):
-        "Register any custom units defined in the metadata.json with Pint"
+        """Register any custom units defined in the metadata.json with Pint"""
         for unit in self.metadata("custom_units") or []:
             ureg.define(unit)

@@ -730,7 +730,7 @@ class Datasette:
         return {"actor": request.actor}

     def table_metadata(self, database, table):
-        "Fetch table-specific metadata."
+        """Fetch table-specific metadata."""
         return (
             (self.metadata("databases") or {})
             .get(database, {})
@@ -739,7 +739,7 @@ class Datasette:
         )

     def _register_renderers(self):
-        """ Register output renderers which output data in custom formats. """
+        """Register output renderers which output data in custom formats."""
         # Built-in renderers
         self.renderers["json"] = (json_renderer, lambda: True)

@@ -880,7 +880,7 @@ class Datasette:
         return output

     def app(self):
-        "Returns an ASGI app function that serves the whole of Datasette"
+        """Returns an ASGI app function that serves the whole of Datasette"""
         routes = []

         for routes_to_add in pm.hook.register_routes():
@@ -1287,7 +1287,7 @@ def permanent_redirect(path):
     )


-_curly_re = re.compile(r"(\{.*?\})")
+_curly_re = re.compile(r"({.*?})")


 def route_pattern_from_filepath(filepath):
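
The `_curly_re` change above drops two backslashes: in Python's re module a `{` that does not open a quantifier matches a literal brace, so the escaped and unescaped patterns are equivalent. A quick check of that assumption:

    import re

    old = re.compile(r"(\{.*?\})")
    new = re.compile(r"({.*?})")

    # Both find literal brace-delimited groups such as route parameters
    assert old.findall("/{database}/{table}") == new.findall("/{database}/{table}")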
@@ -152,7 +152,7 @@ async def inspect_(files, sqlite_extensions):

 @cli.group()
 def publish():
-    "Publish specified SQLite database files to the internet along with a Datasette-powered interface and API"
+    """Publish specified SQLite database files to the internet along with a Datasette-powered interface and API"""
     pass


@@ -168,7 +168,7 @@ pm.hook.publish_subcommand(publish=publish)
     help="Path to directory containing custom plugins",
 )
 def plugins(all, plugins_dir):
-    "List currently available plugins"
+    """List currently available plugins"""
     app = Datasette([], plugins_dir=plugins_dir)
     click.echo(json.dumps(app._plugins(all=all), indent=4))

@@ -244,7 +244,7 @@ def package(
     port,
     **extra_metadata,
 ):
-    "Package specified SQLite files into a new datasette Docker container"
+    """Package specified SQLite files into a new datasette Docker container"""
     if not shutil.which("docker"):
         click.secho(
             ' The package command requires "docker" to be installed and configured ',
@@ -284,7 +284,7 @@ def package(
     "-U", "--upgrade", is_flag=True, help="Upgrade packages to latest version"
 )
 def install(packages, upgrade):
-    "Install Python packages - e.g. Datasette plugins - into the same environment as Datasette"
+    """Install Python packages - e.g. Datasette plugins - into the same environment as Datasette"""
     args = ["pip", "install"]
     if upgrade:
         args += ["--upgrade"]
@@ -297,7 +297,7 @@ def install(packages, upgrade):
 @click.argument("packages", nargs=-1, required=True)
 @click.option("-y", "--yes", is_flag=True, help="Don't ask for confirmation")
 def uninstall(packages, yes):
-    "Uninstall Python packages (e.g. plugins) from the Datasette environment"
+    """Uninstall Python packages (e.g. plugins) from the Datasette environment"""
     sys.argv = ["pip", "uninstall"] + list(packages) + (["-y"] if yes else [])
     run_module("pip", run_name="__main__")

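
Every `"..."` docstring in these CLI commands becomes `"""..."""`. The two forms are identical at runtime - both populate `__doc__`, which Click uses for help text - so the change is purely the PEP 257 recommended style:

    def single():
        "Single-quoted docstring"

    def triple():
        """Triple-quoted docstring"""

    # Identical at runtime; only the source style differs
    assert single.__doc__ == "Single-quoted docstring"
    assert triple.__doc__ == "Triple-quoted docstring"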
@@ -162,10 +162,8 @@ class ColumnFacet(Facet):
                 )
                 num_distinct_values = len(distinct_values)
                 if (
-                    num_distinct_values
-                    and num_distinct_values > 1
+                    1 < num_distinct_values < row_count
                     and num_distinct_values <= facet_size
-                    and num_distinct_values < row_count
                     # And at least one has n > 1
                     and any(r["n"] > 1 for r in distinct_values)
                 ):
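
The facet-suggestion rewrite above folds three clauses into one chained comparison: for an integer count, `1 < n < row_count` already implies that `n` is truthy and greater than 1, so the separate checks collapse. A small demonstration of the equivalence:

    def old_condition(n, row_count):
        return bool(n and n > 1 and n < row_count)

    def new_condition(n, row_count):
        return 1 < n < row_count

    # Equivalent for the non-negative integer counts used here
    for n in range(10):
        for row_count in range(10):
            assert old_condition(n, row_count) == new_condition(n, row_count)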
@@ -208,7 +208,7 @@ class Filters:
         self.ureg = ureg

     def lookups(self):
-        "Yields (lookup, display, no_argument) pairs"
+        """Yields (lookup, display, no_argument) pairs"""
         for filter in self._filters:
             yield filter.key, filter.display, filter.no_argument

@@ -233,7 +233,7 @@ class Filters:
         return f"where {s}"

     def selections(self):
-        "Yields (column, lookup, value) tuples"
+        """Yields (column, lookup, value) tuples"""
         for key, value in self.pairs:
             if "__" in key:
                 column, lookup = key.rsplit("__", 1)
@@ -246,7 +246,7 @@ class Filters:
         return bool(self.pairs)

     def convert_unit(self, column, value):
-        "If the user has provided a unit in the query, convert it into the column unit, if present."
+        """If the user has provided a unit in the query, convert it into the column unit, if present."""
         if column not in self.units:
             return value

@@ -7,108 +7,108 @@ hookimpl = HookimplMarker("datasette")

 @hookspec
 def startup(datasette):
-    "Fires directly after Datasette first starts running"
+    """Fires directly after Datasette first starts running"""


 @hookspec
 def asgi_wrapper(datasette):
-    "Returns an ASGI middleware callable to wrap our ASGI application with"
+    """Returns an ASGI middleware callable to wrap our ASGI application with"""


 @hookspec
 def prepare_connection(conn, database, datasette):
-    "Modify SQLite connection in some way e.g. register custom SQL functions"
+    """Modify SQLite connection in some way e.g. register custom SQL functions"""


 @hookspec
 def prepare_jinja2_environment(env):
-    "Modify Jinja2 template environment e.g. register custom template tags"
+    """Modify Jinja2 template environment e.g. register custom template tags"""


 @hookspec
 def extra_css_urls(template, database, table, columns, view_name, request, datasette):
-    "Extra CSS URLs added by this plugin"
+    """Extra CSS URLs added by this plugin"""


 @hookspec
 def extra_js_urls(template, database, table, columns, view_name, request, datasette):
-    "Extra JavaScript URLs added by this plugin"
+    """Extra JavaScript URLs added by this plugin"""


 @hookspec
 def extra_body_script(
     template, database, table, columns, view_name, request, datasette
 ):
-    "Extra JavaScript code to be included in <script> at bottom of body"
+    """Extra JavaScript code to be included in <script> at bottom of body"""


 @hookspec
 def extra_template_vars(
     template, database, table, columns, view_name, request, datasette
 ):
-    "Extra template variables to be made available to the template - can return dict or callable or awaitable"
+    """Extra template variables to be made available to the template - can return dict or callable or awaitable"""


 @hookspec
 def publish_subcommand(publish):
-    "Subcommands for 'datasette publish'"
+    """Subcommands for 'datasette publish'"""


 @hookspec(firstresult=True)
 def render_cell(value, column, table, database, datasette):
-    "Customize rendering of HTML table cell values"
+    """Customize rendering of HTML table cell values"""


 @hookspec
 def register_output_renderer(datasette):
-    "Register a renderer to output data in a different format"
+    """Register a renderer to output data in a different format"""


 @hookspec
 def register_facet_classes():
-    "Register Facet subclasses"
+    """Register Facet subclasses"""


 @hookspec
 def register_routes():
-    "Register URL routes: return a list of (regex, view_function) pairs"
+    """Register URL routes: return a list of (regex, view_function) pairs"""


 @hookspec
 def actor_from_request(datasette, request):
-    "Return an actor dictionary based on the incoming request"
+    """Return an actor dictionary based on the incoming request"""


 @hookspec
 def permission_allowed(datasette, actor, action, resource):
-    "Check if actor is allowed to perfom this action - return True, False or None"
+    """Check if actor is allowed to perfom this action - return True, False or None"""


 @hookspec
 def canned_queries(datasette, database, actor):
-    "Return a dictonary of canned query definitions or an awaitable function that returns them"
+    """Return a dictonary of canned query definitions or an awaitable function that returns them"""


 @hookspec
 def register_magic_parameters(datasette):
-    "Return a list of (name, function) magic parameter functions"
+    """Return a list of (name, function) magic parameter functions"""


 @hookspec
 def forbidden(datasette, request, message):
-    "Custom response for a 403 forbidden error"
+    """Custom response for a 403 forbidden error"""


 @hookspec
 def menu_links(datasette, actor):
-    "Links for the navigation menu"
+    """Links for the navigation menu"""


 @hookspec
 def table_actions(datasette, actor, database, table):
-    "Links for the table actions menu"
+    """Links for the table actions menu"""


 @hookspec
 def database_actions(datasette, actor, database):
-    "Links for the database actions menu"
+    """Links for the database actions menu"""

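
These hookspecs are pluggy markers, and the docstring style has no effect on hook dispatch. A minimal sketch of how a spec like `menu_links` pairs with a plugin implementation (hypothetical plugin class, using pluggy's public API):

    import pluggy

    hookspec = pluggy.HookspecMarker("datasette")
    hookimpl = pluggy.HookimplMarker("datasette")

    class Spec:
        @hookspec
        def menu_links(self, datasette, actor):
            """Links for the navigation menu"""

    class MyPlugin:
        @hookimpl
        def menu_links(self, datasette, actor):
            # Hypothetical implementation returning a single link
            return [{"href": "/about", "label": "About"}]

    pm = pluggy.PluginManager("datasette")
    pm.add_hookspecs(Spec)
    pm.register(MyPlugin())
    print(pm.hook.menu_links(datasette=None, actor=None))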
@@ -15,7 +15,7 @@ HASH_BLOCK_SIZE = 1024 * 1024


 def inspect_hash(path):
-    " Calculate the hash of a database, efficiently. "
+    """Calculate the hash of a database, efficiently."""
     m = hashlib.sha256()
     with path.open("rb") as fp:
         while True:
@@ -28,14 +28,14 @@ def inspect_hash(path):


 def inspect_views(conn):
-    " List views in a database. "
+    """List views in a database."""
     return [
         v[0] for v in conn.execute('select name from sqlite_master where type = "view"')
     ]


 def inspect_tables(conn, database_metadata):
-    " List tables and their row counts, excluding uninteresting tables. "
+    """List tables and their row counts, excluding uninteresting tables."""
     tables = {}
     table_names = [
         r["name"]
@@ -27,7 +27,7 @@ def convert_specific_columns_to_json(rows, columns, json_cols):


 def json_renderer(args, data, view_name):
-    """ Render a response as JSON """
+    """Render a response as JSON"""
     status_code = 200
     # Handle the _json= parameter which may modify data["rows"]
     json_cols = []
@@ -71,7 +71,7 @@ async def await_me_maybe(value):


 def urlsafe_components(token):
-    "Splits token on commas and URL decodes each component"
+    """Splits token on commas and URL decodes each component"""
     return [urllib.parse.unquote_plus(b) for b in token.split(",")]


@@ -451,7 +451,7 @@ def temporary_docker_directory(


 def detect_primary_keys(conn, table):
-    " Figure out primary keys for a table. "
+    """Figure out primary keys for a table."""
     columns = table_column_details(conn, table)
     pks = [column for column in columns if column.is_pk]
     pks.sort(key=lambda column: column.is_pk)
@@ -521,7 +521,7 @@ def detect_spatialite(conn):


 def detect_fts(conn, table):
-    "Detect if table has a corresponding FTS virtual table and return it"
+    """Detect if table has a corresponding FTS virtual table and return it"""
     rows = conn.execute(detect_fts_sql(table)).fetchall()
     if len(rows) == 0:
         return None
@@ -620,7 +620,7 @@ whitespace_re = re.compile(r"\s")


 def is_url(value):
-    "Must start with http:// or https:// and contain JUST a URL"
+    """Must start with http:// or https:// and contain JUST a URL"""
     if not isinstance(value, str):
         return False
     if not value.startswith("http://") and not value.startswith("https://"):
@@ -863,14 +863,14 @@ class MultiParams:
         return len(self._data)

     def get(self, name, default=None):
-        "Return first value in the list, if available"
+        """Return first value in the list, if available"""
         try:
             return self._data.get(name)[0]
         except (KeyError, TypeError):
             return default

     def getlist(self, name):
-        "Return full list"
+        """Return full list"""
         return self._data.get(name) or []


@@ -967,7 +967,7 @@ def actor_matches_allow(actor, allow):


 async def check_visibility(datasette, actor, action, resource, default=True):
-    "Returns (visible, private) - visible = can you see it, private = can others see it too"
+    """Returns (visible, private) - visible = can you see it, private = can others see it too"""
     visible = await datasette.permission_allowed(
         actor,
         action,
@@ -975,7 +975,7 @@ async def check_visibility(datasette, actor, action, resource, default=True):
         default=default,
     )
     if not visible:
-        return (False, False)
+        return False, False
     private = not await datasette.permission_allowed(
         None,
         action,
@@ -986,7 +986,7 @@ async def check_visibility(datasette, actor, action, resource, default=True):


 def resolve_env_secrets(config, environ):
-    'Create copy that recursively replaces {"$env": "NAME"} with values from environ'
+    """Create copy that recursively replaces {"$env": "NAME"} with values from environ"""
     if isinstance(config, dict):
         if list(config.keys()) == ["$env"]:
             return environ.get(list(config.values())[0])
@@ -1023,7 +1023,7 @@ def find_spatialite():


 async def initial_path_for_datasette(datasette):
-    "Return suggested path for opening this Datasette, based on number of DBs and tables"
+    """Return suggested path for opening this Datasette, based on number of DBs and tables"""
     databases = dict([p for p in datasette.databases.items() if p[0] != "_internal"])
     if len(databases) == 1:
         db_name = next(iter(databases.keys()))
@@ -59,12 +59,10 @@ class Request:

     @property
     def headers(self):
-        return dict(
-            [
-                (k.decode("latin-1").lower(), v.decode("latin-1"))
-                for k, v in self.scope.get("headers") or []
-            ]
-        )
+        return {
+            k.decode("latin-1").lower(): v.decode("latin-1")
+            for k, v in self.scope.get("headers") or []
+        }

     @property
     def host(self):
@@ -115,7 +113,7 @@ class Request:

     @classmethod
     def fake(cls, path_with_query_string, method="GET", scheme="http"):
-        "Useful for constructing Request objects for tests"
+        """Useful for constructing Request objects for tests"""
         path, _, query_string = path_with_query_string.partition("?")
         scope = {
             "http_version": "1.1",
@@ -167,9 +165,7 @@ class AsgiStream:

     async def asgi_send(self, send):
         # Remove any existing content-type header
-        headers = dict(
-            [(k, v) for k, v in self.headers.items() if k.lower() != "content-type"]
-        )
+        headers = {k: v for k, v in self.headers.items() if k.lower() != "content-type"}
         headers["content-type"] = self.content_type
         await send(
             {
@@ -240,7 +236,7 @@ async def asgi_send(send, content, status, headers=None, content_type="text/plain"
 async def asgi_start(send, status, headers=None, content_type="text/plain"):
     headers = headers or {}
     # Remove any existing content-type header
-    headers = dict([(k, v) for k, v in headers.items() if k.lower() != "content-type"])
+    headers = {k: v for k, v in headers.items() if k.lower() != "content-type"}
     headers["content-type"] = content_type
     await send(
         {
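
Both asgi changes above replace `dict()` over a list of key/value tuples with a dict comprehension - the result is identical, the comprehension just skips building the intermediate list:

    headers = {"Content-Type": "text/html", "x-custom": "1"}

    # Old style: materialize a list of tuples, then hand it to dict()
    old = dict([(k, v) for k, v in headers.items() if k.lower() != "content-type"])

    # New style: a dict comprehension, no intermediate list
    new = {k: v for k, v in headers.items() if k.lower() != "content-type"}

    assert old == new == {"x-custom": "1"}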
@@ -74,7 +74,7 @@ class BaseView:
             raise Forbidden(action)

     async def check_permissions(self, request, permissions):
-        "permissions is a list of (action, resource) tuples or 'action' strings"
+        """permissions is a list of (action, resource) tuples or 'action' strings"""
         for permission in permissions:
             if isinstance(permission, str):
                 action = permission
@@ -280,7 +280,7 @@ class DataView(BaseView):
         except (sqlite3.OperationalError, InvalidSql) as e:
             raise DatasetteError(str(e), title="Invalid SQL", status=400)

-        except (sqlite3.OperationalError) as e:
+        except sqlite3.OperationalError as e:
             raise DatasetteError(str(e))

         except DatasetteError:
@@ -451,7 +451,7 @@ class DataView(BaseView):
         except (sqlite3.OperationalError, InvalidSql) as e:
             raise DatasetteError(str(e), title="Invalid SQL", status=400)

-        except (sqlite3.OperationalError) as e:
+        except sqlite3.OperationalError as e:
             raise DatasetteError(str(e))

         except DatasetteError:
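
The `except` cleanup above is safe because parentheses in an `except` clause only matter when they group a tuple of exception types; around a single type they are redundant:

    try:
        [].pop()
    except IndexError:  # modern spelling; `except (IndexError)` parses identically
        pass

    try:
        [].pop()
    except (IndexError, KeyError):  # parentheses still required for a tuple of types
        pass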
@@ -87,7 +87,7 @@ class RowTableShared(DataView):
     async def display_columns_and_rows(
         self, database, table, description, rows, link_column=False, truncate_cells=0
     ):
-        "Returns columns, rows for specified table - including fancy foreign key treatment"
+        """Returns columns, rows for specified table - including fancy foreign key treatment"""
         db = self.ds.databases[database]
         table_metadata = self.ds.table_metadata(database, table)
         column_details = {col.name: col for col in await db.table_column_details(table)}
@@ -743,7 +743,7 @@ class TableView(RowTableShared):
         # Pagination next link
         next_value = None
         next_url = None
-        if len(rows) > page_size and page_size > 0:
+        if 0 < page_size < len(rows):
             if is_view:
                 next_value = int(_next or 0) + page_size
             else:
@@ -751,7 +751,7 @@ def assert_permissions_checked(datasette, actions):
     help="Delete and recreate database if it exists",
 )
 def cli(db_filename, metadata, plugins_path, recreate):
-    "Write out the fixtures database used by Datasette's test suite"
+    """Write out the fixtures database used by Datasette's test suite"""
     if metadata and not metadata.endswith(".json"):
         raise click.ClickException("Metadata should end with .json")
     if not db_filename.endswith(".db"):
@@ -12,7 +12,7 @@ ureg = pint.UnitRegistry()
 @hookimpl
 def prepare_connection(conn, database, datasette):
     def convert_units(amount, from_, to_):
-        "select convert_units(100, 'm', 'ft');"
+        """select convert_units(100, 'm', 'ft');"""
         return (amount * ureg(from_)).to(to_).to_tuple()[0]

     conn.create_function("convert_units", 3, convert_units)
@@ -789,7 +789,7 @@ def test_table_shape_object(app_client):
     } == response.json


-def test_table_shape_object_compound_primary_Key(app_client):
+def test_table_shape_object_compound_primary_key(app_client):
     response = app_client.get("/fixtures/compound_primary_key.json?_shape=object")
     assert {"a,b": {"pk1": "a", "pk2": "b", "content": "c"}} == response.json

@@ -871,7 +871,7 @@ def test_validate_page_size(app_client, path, expected_error):


 def test_page_size_zero(app_client):
-    "For _size=0 we return the counts, empty rows and no continuation token"
+    """For _size=0 we return the counts, empty rows and no continuation token"""
     response = app_client.get("/fixtures/no_primary_key.json?_size=0")
     assert 200 == response.status
     assert [] == response.json["rows"]
@@ -5,7 +5,7 @@ import time


 def test_auth_token(app_client):
-    "The /-/auth-token endpoint sets the correct cookie"
+    """The /-/auth-token endpoint sets the correct cookie"""
     assert app_client.ds._root_token is not None
     path = f"/-/auth-token?token={app_client.ds._root_token}"
     response = app_client.get(
@@ -29,7 +29,7 @@ def test_auth_token(app_client):


 def test_actor_cookie(app_client):
-    "A valid actor cookie sets request.scope['actor']"
+    """A valid actor cookie sets request.scope['actor']"""
     cookie = app_client.actor_cookie({"id": "test"})
     response = app_client.get("/", cookies={"ds_actor": cookie})
     assert {"id": "test"} == app_client.ds._last_request.scope["actor"]
@@ -110,7 +110,7 @@ def test_plugins_cli(app_client):
     result2 = runner.invoke(cli, ["plugins", "--all"])
     names = [p["name"] for p in json.loads(result2.output)]
     # Should have all the EXPECTED_PLUGINS
-    assert set(names).issuperset(set(p["name"] for p in EXPECTED_PLUGINS))
+    assert set(names).issuperset({p["name"] for p in EXPECTED_PLUGINS})
     # And the following too:
     assert set(names).issuperset(DEFAULT_PLUGINS)

@@ -15,7 +15,7 @@ label_re = re.compile(r"\.\. _([^\s:]+):")

 def get_headings(content, underline="-"):
     heading_re = re.compile(r"(\w+)(\([^)]*\))?\n\{}+\n".format(underline))
-    return set(h[0] for h in heading_re.findall(content))
+    return {h[0] for h in heading_re.findall(content)}


 def get_labels(filename):
@@ -96,11 +96,11 @@ def documented_table_filters():
     json_api_rst = (docs_path / "json_api.rst").read_text()
     section = json_api_rst.split(".. _table_arguments:")[-1]
     # Lines starting with ``?column__exact= are docs for filters
-    return set(
+    return {
         line.split("__")[1].split("=")[0]
         for line in section.split("\n")
         if line.startswith("``?column__")
-    )
+    }


 @pytest.mark.parametrize("filter", [f.key for f in Filters._filters])
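
The test changes above swap `set(generator)` for a set comprehension; the resulting set is the same, without the generator-plus-constructor indirection:

    rows = [{"name": "json"}, {"name": "csv"}, {"name": "json"}]

    old = set(r["name"] for r in rows)
    new = {r["name"] for r in rows}

    assert old == new == {"csv", "json"}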
@@ -437,7 +437,7 @@ def cascade_app_client():
     ],
 )
 def test_permissions_cascade(cascade_app_client, path, permissions, expected_status):
-    "Test that e.g. having view-table but NOT view-database lets you view table page, etc"
+    """Test that e.g. having view-table but NOT view-database lets you view table page, etc"""
     allow = {"id": "*"}
     deny = {}
     previous_metadata = cascade_app_client.ds._metadata
@@ -28,7 +28,7 @@ at_memory_re = re.compile(r" at 0x\w+")
     "plugin_hook", [name for name in dir(pm.hook) if not name.startswith("_")]
 )
 def test_plugin_hooks_have_tests(plugin_hook):
-    "Every plugin hook should be referenced in this test module"
+    """Every plugin hook should be referenced in this test module"""
     tests_in_this_module = [t for t in globals().keys() if t.startswith("test_hook_")]
     ok = False
     for test in tests_in_this_module: