?_extra= support and TableView refactor to table_view

* Implemented ?_extra= option for JSON views, refs #262
* New dependency: asyncinject
* Remove now-obsolete TableView class
pull/2043/head
Simon Willison 2023-03-22 15:49:39 -07:00 zatwierdzone przez GitHub
rodzic 56b0758a5f
commit d97e82df3c
Nie znaleziono w bazie danych klucza dla tego podpisu
ID klucza GPG: 4AEE18F83AFDEB23
20 zmienionych plików z 1597 dodań i 1089 usunięć

Wyświetl plik

@ -1,5 +1,4 @@
import asyncio
from pydoc import plain
from typing import Sequence, Union, Tuple, Optional, Dict, Iterable
import asgi_csrf
import collections
@ -24,7 +23,12 @@ from pathlib import Path
from markupsafe import Markup, escape
from itsdangerous import URLSafeSerializer
from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader
from jinja2 import (
ChoiceLoader,
Environment,
FileSystemLoader,
PrefixLoader,
)
from jinja2.environment import Template
from jinja2.exceptions import TemplateNotFound
@ -42,7 +46,12 @@ from .views.special import (
PermissionsDebugView,
MessagesDebugView,
)
from .views.table import TableView, TableInsertView, TableUpsertView, TableDropView
from .views.table import (
TableInsertView,
TableUpsertView,
TableDropView,
table_view,
)
from .views.row import RowView, RowDeleteView, RowUpdateView
from .renderer import json_renderer
from .url_builder import Urls
@ -389,7 +398,10 @@ class Datasette:
]
)
self.jinja_env = Environment(
loader=template_loader, autoescape=True, enable_async=True
loader=template_loader,
autoescape=True,
enable_async=True,
# undefined=StrictUndefined,
)
self.jinja_env.filters["escape_css_string"] = escape_css_string
self.jinja_env.filters["quote_plus"] = urllib.parse.quote_plus
@ -1358,7 +1370,7 @@ class Datasette:
)
add_route(TableCreateView.as_view(self), r"/(?P<database>[^\/\.]+)/-/create$")
add_route(
TableView.as_view(self),
wrap_view(table_view, self),
r"/(?P<database>[^\/\.]+)/(?P<table>[^\/\.]+)(\.(?P<format>\w+))?$",
)
add_route(

Wyświetl plik

@ -136,6 +136,7 @@ def sqlite_extensions(fn):
multiple=True,
help="Path to a SQLite extension to load, and optional entrypoint",
)(fn)
# Wrap it in a custom error handler
@functools.wraps(fn)
def wrapped(*args, **kwargs):

Wyświetl plik

@ -4,6 +4,7 @@ from datasette.utils import (
remove_infinites,
CustomJSONEncoder,
path_from_row_pks,
sqlite3,
)
from datasette.utils.asgi import Response
@ -49,10 +50,14 @@ def json_renderer(args, data, view_name):
if data.get("error"):
shape = "objects"
next_url = data.get("next_url")
if shape == "arrayfirst":
data = [row[0] for row in data["rows"]]
if not data["rows"]:
data = []
elif isinstance(data["rows"][0], sqlite3.Row):
data = [row[0] for row in data["rows"]]
else:
assert isinstance(data["rows"][0], dict)
data = [next(iter(row.values())) for row in data["rows"]]
elif shape in ("objects", "object", "array"):
columns = data.get("columns")
rows = data.get("rows")
@ -80,7 +85,12 @@ def json_renderer(args, data, view_name):
data = data["rows"]
elif shape == "arrays":
pass
if not data["rows"]:
pass
elif isinstance(data["rows"][0], sqlite3.Row):
data["rows"] = [list(row) for row in data["rows"]]
else:
data["rows"] = [list(row.values()) for row in data["rows"]]
else:
status_code = 400
data = {
@ -98,8 +108,6 @@ def json_renderer(args, data, view_name):
body = json.dumps(data, cls=CustomJSONEncoder)
content_type = "application/json; charset=utf-8"
headers = {}
if next_url:
headers["link"] = f'<{next_url}>; rel="next"'
return Response(
body, status=status_code, headers=headers, content_type=content_type
)

Wyświetl plik

@ -1,6 +1,6 @@
{% if metadata.description_html or metadata.description %}
{% if metadata.get("description_html") or metadata.get("description") %}
<div class="metadata-description">
{% if metadata.description_html %}
{% if metadata.get("description_html") %}
{{ metadata.description_html|safe }}
{% else %}
{{ metadata.description }}

Wyświetl plik

@ -1,3 +1,3 @@
<p class="suggested-facets">
Suggested facets: {% for facet in suggested_facets %}<a href="{{ facet.toggle_url }}#facet-{{ facet.name|to_css_class }}">{{ facet.name }}</a>{% if facet.type %} ({{ facet.type }}){% endif %}{% if not loop.last %}, {% endif %}{% endfor %}
Suggested facets: {% for facet in suggested_facets %}<a href="{{ facet.toggle_url }}#facet-{{ facet.name|to_css_class }}">{{ facet.name }}</a>{% if facet.get("type") %} ({{ facet.type }}){% endif %}{% if not loop.last %}, {% endif %}{% endfor %}
</p>

Wyświetl plik

@ -5,10 +5,10 @@
<link rel="stylesheet" href="{{ urls.static('app.css') }}?{{ app_css_hash }}">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
{% for url in extra_css_urls %}
<link rel="stylesheet" href="{{ url.url }}"{% if url.sri %} integrity="{{ url.sri }}" crossorigin="anonymous"{% endif %}>
<link rel="stylesheet" href="{{ url.url }}"{% if url.get("sri") %} integrity="{{ url.sri }}" crossorigin="anonymous"{% endif %}>
{% endfor %}
{% for url in extra_js_urls %}
<script {% if url.module %}type="module" {% endif %}src="{{ url.url }}"{% if url.sri %} integrity="{{ url.sri }}" crossorigin="anonymous"{% endif %}></script>
<script {% if url.module %}type="module" {% endif %}src="{{ url.url }}"{% if url.get("sri") %} integrity="{{ url.sri }}" crossorigin="anonymous"{% endif %}></script>
{% endfor %}
{%- if alternate_url_json -%}
<link rel="alternate" type="application/json+datasette" href="{{ alternate_url_json }}">

Wyświetl plik

@ -22,7 +22,7 @@
{% block content %}
<div class="page-header" style="border-color: #{{ database_color(database) }}">
<h1>{{ metadata.title or table }}{% if is_view %} (view){% endif %}{% if private %} 🔒{% endif %}</h1>
<h1>{{ metadata.get("title") or table }}{% if is_view %} (view){% endif %}{% if private %} 🔒{% endif %}</h1>
{% set links = table_actions() %}{% if links %}
<details class="actions-menu-links details-menu">
<summary><svg aria-labelledby="actions-menu-links-title" role="img"
@ -47,7 +47,7 @@
{% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %}
{% if metadata.columns %}
{% if metadata.get("columns") %}
<dl class="column-descriptions">
{% for column_name, column_description in metadata.columns.items() %}
<dt>{{ column_name }}</dt><dd>{{ column_description }}</dd>
@ -94,7 +94,7 @@
</div><div class="select-wrapper filter-op">
<select name="_filter_op">
{% for key, display, no_argument in filters.lookups() %}
<option value="{{ key }}{% if no_argument %}__1{% endif %}"{% if key == lookup %} selected{% endif %}>{{ display }}</option>
<option value="{{ key }}{% if no_argument %}__1{% endif %}">{{ display }}</option>
{% endfor %}
</select>
</div><input type="text" name="_filter_value" class="filter-value">

Wyświetl plik

@ -828,9 +828,18 @@ _infinities = {float("inf"), float("-inf")}
def remove_infinites(row):
if any((c in _infinities) if isinstance(c, float) else 0 for c in row):
to_check = row
if isinstance(row, dict):
to_check = row.values()
if not any((c in _infinities) if isinstance(c, float) else 0 for c in to_check):
return row
if isinstance(row, dict):
return {
k: (None if (isinstance(v, float) and v in _infinities) else v)
for k, v in row.items()
}
else:
return [None if (isinstance(c, float) and c in _infinities) else c for c in row]
return row
class StaticMount(click.ParamType):

Wyświetl plik

@ -174,176 +174,8 @@ class DataView(BaseView):
async def data(self, request):
raise NotImplementedError
def get_templates(self, database, table=None):
assert NotImplemented
async def as_csv(self, request, database):
kwargs = {}
stream = request.args.get("_stream")
# Do not calculate facets or counts:
extra_parameters = [
"{}=1".format(key)
for key in ("_nofacet", "_nocount")
if not request.args.get(key)
]
if extra_parameters:
# Replace request object with a new one with modified scope
if not request.query_string:
new_query_string = "&".join(extra_parameters)
else:
new_query_string = (
request.query_string + "&" + "&".join(extra_parameters)
)
new_scope = dict(
request.scope, query_string=new_query_string.encode("latin-1")
)
receive = request.receive
request = Request(new_scope, receive)
if stream:
# Some quick soundness checks
if not self.ds.setting("allow_csv_stream"):
raise BadRequest("CSV streaming is disabled")
if request.args.get("_next"):
raise BadRequest("_next not allowed for CSV streaming")
kwargs["_size"] = "max"
# Fetch the first page
try:
response_or_template_contexts = await self.data(request)
if isinstance(response_or_template_contexts, Response):
return response_or_template_contexts
elif len(response_or_template_contexts) == 4:
data, _, _, _ = response_or_template_contexts
else:
data, _, _ = response_or_template_contexts
except (sqlite3.OperationalError, InvalidSql) as e:
raise DatasetteError(str(e), title="Invalid SQL", status=400)
except sqlite3.OperationalError as e:
raise DatasetteError(str(e))
except DatasetteError:
raise
# Convert rows and columns to CSV
headings = data["columns"]
# if there are expanded_columns we need to add additional headings
expanded_columns = set(data.get("expanded_columns") or [])
if expanded_columns:
headings = []
for column in data["columns"]:
headings.append(column)
if column in expanded_columns:
headings.append(f"{column}_label")
content_type = "text/plain; charset=utf-8"
preamble = ""
postamble = ""
trace = request.args.get("_trace")
if trace:
content_type = "text/html; charset=utf-8"
preamble = (
"<html><head><title>CSV debug</title></head>"
'<body><textarea style="width: 90%; height: 70vh">'
)
postamble = "</textarea></body></html>"
async def stream_fn(r):
nonlocal data, trace
limited_writer = LimitedWriter(r, self.ds.setting("max_csv_mb"))
if trace:
await limited_writer.write(preamble)
writer = csv.writer(EscapeHtmlWriter(limited_writer))
else:
writer = csv.writer(limited_writer)
first = True
next = None
while first or (next and stream):
try:
kwargs = {}
if next:
kwargs["_next"] = next
if not first:
data, _, _ = await self.data(request, **kwargs)
if first:
if request.args.get("_header") != "off":
await writer.writerow(headings)
first = False
next = data.get("next")
for row in data["rows"]:
if any(isinstance(r, bytes) for r in row):
new_row = []
for column, cell in zip(headings, row):
if isinstance(cell, bytes):
# If this is a table page, use .urls.row_blob()
if data.get("table"):
pks = data.get("primary_keys") or []
cell = self.ds.absolute_url(
request,
self.ds.urls.row_blob(
database,
data["table"],
path_from_row_pks(row, pks, not pks),
column,
),
)
else:
# Otherwise generate URL for this query
url = self.ds.absolute_url(
request,
path_with_format(
request=request,
format="blob",
extra_qs={
"_blob_column": column,
"_blob_hash": hashlib.sha256(
cell
).hexdigest(),
},
replace_format="csv",
),
)
cell = url.replace("&_nocount=1", "").replace(
"&_nofacet=1", ""
)
new_row.append(cell)
row = new_row
if not expanded_columns:
# Simple path
await writer.writerow(row)
else:
# Look for {"value": "label": } dicts and expand
new_row = []
for heading, cell in zip(data["columns"], row):
if heading in expanded_columns:
if cell is None:
new_row.extend(("", ""))
else:
assert isinstance(cell, dict)
new_row.append(cell["value"])
new_row.append(cell["label"])
else:
new_row.append(cell)
await writer.writerow(new_row)
except Exception as e:
sys.stderr.write("Caught this error: {}\n".format(e))
sys.stderr.flush()
await r.write(str(e))
return
await limited_writer.write(postamble)
headers = {}
if self.ds.cors:
add_cors_headers(headers)
if request.args.get("_dl", None):
if not trace:
content_type = "text/csv; charset=utf-8"
disposition = 'attachment; filename="{}.csv"'.format(
request.url_vars.get("table", database)
)
headers["content-disposition"] = disposition
return AsgiStream(stream_fn, headers=headers, content_type=content_type)
return await stream_csv(self.ds, self.data, request, database)
async def get(self, request):
db = await self.ds.resolve_database(request)
@ -518,7 +350,7 @@ class DataView(BaseView):
},
}
if "metadata" not in context:
context["metadata"] = self.ds.metadata
context["metadata"] = self.ds.metadata()
r = await self.render(templates, request=request, context=context)
if status_code is not None:
r.status = status_code
@ -546,3 +378,169 @@ class DataView(BaseView):
def _error(messages, status=400):
return Response.json({"ok": False, "errors": messages}, status=status)
async def stream_csv(datasette, fetch_data, request, database):
kwargs = {}
stream = request.args.get("_stream")
# Do not calculate facets or counts:
extra_parameters = [
"{}=1".format(key)
for key in ("_nofacet", "_nocount")
if not request.args.get(key)
]
if extra_parameters:
# Replace request object with a new one with modified scope
if not request.query_string:
new_query_string = "&".join(extra_parameters)
else:
new_query_string = request.query_string + "&" + "&".join(extra_parameters)
new_scope = dict(request.scope, query_string=new_query_string.encode("latin-1"))
receive = request.receive
request = Request(new_scope, receive)
if stream:
# Some quick soundness checks
if not datasette.setting("allow_csv_stream"):
raise BadRequest("CSV streaming is disabled")
if request.args.get("_next"):
raise BadRequest("_next not allowed for CSV streaming")
kwargs["_size"] = "max"
# Fetch the first page
try:
response_or_template_contexts = await fetch_data(request)
if isinstance(response_or_template_contexts, Response):
return response_or_template_contexts
elif len(response_or_template_contexts) == 4:
data, _, _, _ = response_or_template_contexts
else:
data, _, _ = response_or_template_contexts
except (sqlite3.OperationalError, InvalidSql) as e:
raise DatasetteError(str(e), title="Invalid SQL", status=400)
except sqlite3.OperationalError as e:
raise DatasetteError(str(e))
except DatasetteError:
raise
# Convert rows and columns to CSV
headings = data["columns"]
# if there are expanded_columns we need to add additional headings
expanded_columns = set(data.get("expanded_columns") or [])
if expanded_columns:
headings = []
for column in data["columns"]:
headings.append(column)
if column in expanded_columns:
headings.append(f"{column}_label")
content_type = "text/plain; charset=utf-8"
preamble = ""
postamble = ""
trace = request.args.get("_trace")
if trace:
content_type = "text/html; charset=utf-8"
preamble = (
"<html><head><title>CSV debug</title></head>"
'<body><textarea style="width: 90%; height: 70vh">'
)
postamble = "</textarea></body></html>"
async def stream_fn(r):
nonlocal data, trace
print("max_csv_mb", datasette.setting("max_csv_mb"))
limited_writer = LimitedWriter(r, datasette.setting("max_csv_mb"))
if trace:
await limited_writer.write(preamble)
writer = csv.writer(EscapeHtmlWriter(limited_writer))
else:
writer = csv.writer(limited_writer)
first = True
next = None
while first or (next and stream):
try:
kwargs = {}
if next:
kwargs["_next"] = next
if not first:
data, _, _ = await fetch_data(request, **kwargs)
if first:
if request.args.get("_header") != "off":
await writer.writerow(headings)
first = False
next = data.get("next")
for row in data["rows"]:
if any(isinstance(r, bytes) for r in row):
new_row = []
for column, cell in zip(headings, row):
if isinstance(cell, bytes):
# If this is a table page, use .urls.row_blob()
if data.get("table"):
pks = data.get("primary_keys") or []
cell = datasette.absolute_url(
request,
datasette.urls.row_blob(
database,
data["table"],
path_from_row_pks(row, pks, not pks),
column,
),
)
else:
# Otherwise generate URL for this query
url = datasette.absolute_url(
request,
path_with_format(
request=request,
format="blob",
extra_qs={
"_blob_column": column,
"_blob_hash": hashlib.sha256(
cell
).hexdigest(),
},
replace_format="csv",
),
)
cell = url.replace("&_nocount=1", "").replace(
"&_nofacet=1", ""
)
new_row.append(cell)
row = new_row
if not expanded_columns:
# Simple path
await writer.writerow(row)
else:
# Look for {"value": "label": } dicts and expand
new_row = []
for heading, cell in zip(data["columns"], row):
if heading in expanded_columns:
if cell is None:
new_row.extend(("", ""))
else:
assert isinstance(cell, dict)
new_row.append(cell["value"])
new_row.append(cell["label"])
else:
new_row.append(cell)
await writer.writerow(new_row)
except Exception as e:
sys.stderr.write("Caught this error: {}\n".format(e))
sys.stderr.flush()
await r.write(str(e))
return
await limited_writer.write(postamble)
headers = {}
if datasette.cors:
add_cors_headers(headers)
if request.args.get("_dl", None):
if not trace:
content_type = "text/csv; charset=utf-8"
disposition = 'attachment; filename="{}.csv"'.format(
request.url_vars.get("table", database)
)
headers["content-disposition"] = disposition
return AsgiStream(stream_fn, headers=headers, content_type=content_type)

Wyświetl plik

@ -223,6 +223,7 @@ class QueryView(DataView):
_size=None,
named_parameters=None,
write=False,
default_labels=None,
):
db = await self.ds.resolve_database(request)
database = db.name

Plik diff jest za duży Load Diff

Wyświetl plik

@ -58,6 +58,7 @@ setup(
"mergedeep>=1.1.1",
"itsdangerous>=1.1",
"sqlite-utils>=3.30",
"asyncinject>=0.5",
],
entry_points="""
[console_scripts]

Wyświetl plik

@ -896,9 +896,11 @@ def test_config_cache_size(app_client_larger_cache_size):
def test_config_force_https_urls():
with make_app_client(settings={"force_https_urls": True}) as client:
response = client.get("/fixtures/facetable.json?_size=3&_facet=state")
response = client.get(
"/fixtures/facetable.json?_size=3&_facet=state&_extra=next_url,suggested_facets"
)
assert response.json["next_url"].startswith("https://")
assert response.json["facet_results"]["state"]["results"][0][
assert response.json["facet_results"]["results"]["state"]["results"][0][
"toggle_url"
].startswith("https://")
assert response.json["suggested_facets"][0]["toggle_url"].startswith("https://")
@ -981,7 +983,9 @@ def test_common_prefix_database_names(app_client_conflicting_database_names):
def test_inspect_file_used_for_count(app_client_immutable_and_inspect_file):
response = app_client_immutable_and_inspect_file.get("/fixtures/sortable.json")
response = app_client_immutable_and_inspect_file.get(
"/fixtures/sortable.json?_extra=count"
)
assert response.json["count"] == 100

Wyświetl plik

@ -419,7 +419,7 @@ async def test_array_facet_handle_duplicate_tags():
)
response = await ds.client.get("/test_array_facet/otters.json?_facet_array=tags")
assert response.json()["facet_results"]["tags"] == {
assert response.json()["facet_results"]["results"]["tags"] == {
"name": "tags",
"type": "array",
"results": [
@ -517,13 +517,13 @@ async def test_json_array_with_blanks_and_nulls():
await db.execute_write("create table foo(json_column text)")
for value in ('["a", "b", "c"]', '["a", "b"]', "", None):
await db.execute_write("insert into foo (json_column) values (?)", [value])
response = await ds.client.get("/test_json_array/foo.json")
response = await ds.client.get("/test_json_array/foo.json?_extra=suggested_facets")
data = response.json()
assert data["suggested_facets"] == [
{
"name": "json_column",
"type": "array",
"toggle_url": "http://localhost/test_json_array/foo.json?_facet_array=json_column",
"toggle_url": "http://localhost/test_json_array/foo.json?_extra=suggested_facets&_facet_array=json_column",
}
]
@ -539,27 +539,29 @@ async def test_facet_size():
"insert into neighbourhoods (city, neighbourhood) values (?, ?)",
["City {}".format(i), "Neighbourhood {}".format(j)],
)
response = await ds.client.get("/test_facet_size/neighbourhoods.json")
response = await ds.client.get(
"/test_facet_size/neighbourhoods.json?_extra=suggested_facets"
)
data = response.json()
assert data["suggested_facets"] == [
{
"name": "neighbourhood",
"toggle_url": "http://localhost/test_facet_size/neighbourhoods.json?_facet=neighbourhood",
"toggle_url": "http://localhost/test_facet_size/neighbourhoods.json?_extra=suggested_facets&_facet=neighbourhood",
}
]
# Bump up _facet_size= to suggest city too
response2 = await ds.client.get(
"/test_facet_size/neighbourhoods.json?_facet_size=50"
"/test_facet_size/neighbourhoods.json?_facet_size=50&_extra=suggested_facets"
)
data2 = response2.json()
assert sorted(data2["suggested_facets"], key=lambda f: f["name"]) == [
{
"name": "city",
"toggle_url": "http://localhost/test_facet_size/neighbourhoods.json?_facet_size=50&_facet=city",
"toggle_url": "http://localhost/test_facet_size/neighbourhoods.json?_facet_size=50&_extra=suggested_facets&_facet=city",
},
{
"name": "neighbourhood",
"toggle_url": "http://localhost/test_facet_size/neighbourhoods.json?_facet_size=50&_facet=neighbourhood",
"toggle_url": "http://localhost/test_facet_size/neighbourhoods.json?_facet_size=50&_extra=suggested_facets&_facet=neighbourhood",
},
]
# Facet by city should return expected number of results
@ -567,20 +569,20 @@ async def test_facet_size():
"/test_facet_size/neighbourhoods.json?_facet_size=50&_facet=city"
)
data3 = response3.json()
assert len(data3["facet_results"]["city"]["results"]) == 50
assert len(data3["facet_results"]["results"]["city"]["results"]) == 50
# Reduce max_returned_rows and check that it's respected
ds._settings["max_returned_rows"] = 20
response4 = await ds.client.get(
"/test_facet_size/neighbourhoods.json?_facet_size=50&_facet=city"
)
data4 = response4.json()
assert len(data4["facet_results"]["city"]["results"]) == 20
assert len(data4["facet_results"]["results"]["city"]["results"]) == 20
# Test _facet_size=max
response5 = await ds.client.get(
"/test_facet_size/neighbourhoods.json?_facet_size=max&_facet=city"
)
data5 = response5.json()
assert len(data5["facet_results"]["city"]["results"]) == 20
assert len(data5["facet_results"]["results"]["city"]["results"]) == 20
# Now try messing with facet_size in the table metadata
orig_metadata = ds._metadata_local
try:
@ -593,7 +595,7 @@ async def test_facet_size():
"/test_facet_size/neighbourhoods.json?_facet=city"
)
data6 = response6.json()
assert len(data6["facet_results"]["city"]["results"]) == 6
assert len(data6["facet_results"]["results"]["city"]["results"]) == 6
# Setting it to max bumps it up to 50 again
ds._metadata_local["databases"]["test_facet_size"]["tables"]["neighbourhoods"][
"facet_size"
@ -601,7 +603,7 @@ async def test_facet_size():
data7 = (
await ds.client.get("/test_facet_size/neighbourhoods.json?_facet=city")
).json()
assert len(data7["facet_results"]["city"]["results"]) == 20
assert len(data7["facet_results"]["results"]["city"]["results"]) == 20
finally:
ds._metadata_local = orig_metadata
@ -635,7 +637,7 @@ async def test_conflicting_facet_names_json(ds_client):
"/fixtures/facetable.json?_facet=created&_facet_date=created"
"&_facet=tags&_facet_array=tags"
)
assert set(response.json()["facet_results"].keys()) == {
assert set(response.json()["facet_results"]["results"].keys()) == {
"created",
"tags",
"created_2",

Wyświetl plik

@ -82,13 +82,11 @@ async def test_through_filters_from_request(ds_client):
request = Request.fake(
'/?_through={"table":"roadside_attraction_characteristics","column":"characteristic_id","value":"1"}'
)
filter_args = await (
through_filters(
request=request,
datasette=ds_client.ds,
table="roadside_attractions",
database="fixtures",
)
filter_args = await through_filters(
request=request,
datasette=ds_client.ds,
table="roadside_attractions",
database="fixtures",
)()
assert filter_args.where_clauses == [
"pk in (select attraction_id from roadside_attraction_characteristics where characteristic_id = :p0)"
@ -105,13 +103,11 @@ async def test_through_filters_from_request(ds_client):
request = Request.fake(
'/?_through={"table":"roadside_attraction_characteristics","column":"characteristic_id","value":"1"}'
)
filter_args = await (
through_filters(
request=request,
datasette=ds_client.ds,
table="roadside_attractions",
database="fixtures",
)
filter_args = await through_filters(
request=request,
datasette=ds_client.ds,
table="roadside_attractions",
database="fixtures",
)()
assert filter_args.where_clauses == [
"pk in (select attraction_id from roadside_attraction_characteristics where characteristic_id = :p0)"
@ -127,12 +123,10 @@ async def test_through_filters_from_request(ds_client):
async def test_where_filters_from_request(ds_client):
await ds_client.ds.invoke_startup()
request = Request.fake("/?_where=pk+>+3")
filter_args = await (
where_filters(
request=request,
datasette=ds_client.ds,
database="fixtures",
)
filter_args = await where_filters(
request=request,
datasette=ds_client.ds,
database="fixtures",
)()
assert filter_args.where_clauses == ["pk > 3"]
assert filter_args.params == {}
@ -145,13 +139,11 @@ async def test_where_filters_from_request(ds_client):
@pytest.mark.asyncio
async def test_search_filters_from_request(ds_client):
request = Request.fake("/?_search=bobcat")
filter_args = await (
search_filters(
request=request,
datasette=ds_client.ds,
database="fixtures",
table="searchable",
)
filter_args = await search_filters(
request=request,
datasette=ds_client.ds,
database="fixtures",
table="searchable",
)()
assert filter_args.where_clauses == [
"rowid in (select rowid from searchable_fts where searchable_fts match escape_fts(:search))"

Wyświetl plik

@ -8,6 +8,7 @@ from pathlib import Path
# this resolves to "./ext", which is enough for SQLite to calculate the rest
COMPILED_EXTENSION_PATH = str(Path(__file__).parent / "ext")
# See if ext.c has been compiled, based off the different possible suffixes.
def has_compiled_ext():
for ext in ["dylib", "so", "dll"]:
@ -20,7 +21,6 @@ def has_compiled_ext():
@pytest.mark.asyncio
@pytest.mark.skipif(not has_compiled_ext(), reason="Requires compiled ext.c")
async def test_load_extension_default_entrypoint():
# The default entrypoint only loads a() and NOT b() or c(), so those
# should fail.
ds = Datasette(sqlite_extensions=[COMPILED_EXTENSION_PATH])
@ -41,7 +41,6 @@ async def test_load_extension_default_entrypoint():
@pytest.mark.asyncio
@pytest.mark.skipif(not has_compiled_ext(), reason="Requires compiled ext.c")
async def test_load_extension_multiple_entrypoints():
# Load in the default entrypoint and the other 2 custom entrypoints, now
# all a(), b(), and c() should run successfully.
ds = Datasette(

Wyświetl plik

@ -595,42 +595,42 @@ def test_hook_publish_subcommand():
@pytest.mark.asyncio
async def test_hook_register_facet_classes(ds_client):
response = await ds_client.get(
"/fixtures/compound_three_primary_keys.json?_dummy_facet=1"
"/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_extra=suggested_facets"
)
assert [
assert response.json()["suggested_facets"] == [
{
"name": "pk1",
"toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_facet_dummy=pk1",
"toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_extra=suggested_facets&_facet_dummy=pk1",
"type": "dummy",
},
{
"name": "pk2",
"toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_facet_dummy=pk2",
"toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_extra=suggested_facets&_facet_dummy=pk2",
"type": "dummy",
},
{
"name": "pk3",
"toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_facet_dummy=pk3",
"toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_extra=suggested_facets&_facet_dummy=pk3",
"type": "dummy",
},
{
"name": "content",
"toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_facet_dummy=content",
"toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_extra=suggested_facets&_facet_dummy=content",
"type": "dummy",
},
{
"name": "pk1",
"toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_facet=pk1",
"toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_extra=suggested_facets&_facet=pk1",
},
{
"name": "pk2",
"toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_facet=pk2",
"toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_extra=suggested_facets&_facet=pk2",
},
{
"name": "pk3",
"toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_facet=pk3",
"toggle_url": "http://localhost/fixtures/compound_three_primary_keys.json?_dummy_facet=1&_extra=suggested_facets&_facet=pk3",
},
] == response.json()["suggested_facets"]
]
@pytest.mark.asyncio

Wyświetl plik

@ -11,7 +11,7 @@ def routes():
@pytest.mark.parametrize(
"path,expected_class,expected_matches",
"path,expected_name,expected_matches",
(
("/", "IndexView", {"format": None}),
("/foo", "DatabaseView", {"format": None, "database": "foo"}),
@ -20,17 +20,17 @@ def routes():
("/foo.humbug", "DatabaseView", {"format": "humbug", "database": "foo"}),
(
"/foo/humbug",
"TableView",
"table_view",
{"database": "foo", "table": "humbug", "format": None},
),
(
"/foo/humbug.json",
"TableView",
"table_view",
{"database": "foo", "table": "humbug", "format": "json"},
),
(
"/foo/humbug.blah",
"TableView",
"table_view",
{"database": "foo", "table": "humbug", "format": "blah"},
),
(
@ -47,12 +47,14 @@ def routes():
("/-/metadata", "JsonDataView", {"format": None}),
),
)
def test_routes(routes, path, expected_class, expected_matches):
def test_routes(routes, path, expected_name, expected_matches):
match, view = resolve_routes(routes, path)
if expected_class is None:
if expected_name is None:
assert match is None
else:
assert view.view_class.__name__ == expected_class
assert (
view.__name__ == expected_name or view.view_class.__name__ == expected_name
)
assert match.groupdict() == expected_matches

Wyświetl plik

@ -15,7 +15,7 @@ import urllib
@pytest.mark.asyncio
async def test_table_json(ds_client):
response = await ds_client.get("/fixtures/simple_primary_key.json?_shape=objects")
response = await ds_client.get("/fixtures/simple_primary_key.json?_extra=query")
assert response.status_code == 200
data = response.json()
assert (
@ -198,6 +198,10 @@ async def test_paginate_tables_and_views(
fetched = []
count = 0
while path:
if "?" in path:
path += "&_extra=next_url"
else:
path += "?_extra=next_url"
response = await ds_client.get(path)
assert response.status_code == 200
count += 1
@ -230,7 +234,9 @@ async def test_validate_page_size(ds_client, path, expected_error):
@pytest.mark.asyncio
async def test_page_size_zero(ds_client):
"""For _size=0 we return the counts, empty rows and no continuation token"""
response = await ds_client.get("/fixtures/no_primary_key.json?_size=0")
response = await ds_client.get(
"/fixtures/no_primary_key.json?_size=0&_extra=count,next_url"
)
assert response.status_code == 200
assert [] == response.json()["rows"]
assert 201 == response.json()["count"]
@ -241,7 +247,7 @@ async def test_page_size_zero(ds_client):
@pytest.mark.asyncio
async def test_paginate_compound_keys(ds_client):
fetched = []
path = "/fixtures/compound_three_primary_keys.json?_shape=objects"
path = "/fixtures/compound_three_primary_keys.json?_shape=objects&_extra=next_url"
page = 0
while path:
page += 1
@ -262,9 +268,7 @@ async def test_paginate_compound_keys(ds_client):
@pytest.mark.asyncio
async def test_paginate_compound_keys_with_extra_filters(ds_client):
fetched = []
path = (
"/fixtures/compound_three_primary_keys.json?content__contains=d&_shape=objects"
)
path = "/fixtures/compound_three_primary_keys.json?content__contains=d&_shape=objects&_extra=next_url"
page = 0
while path:
page += 1
@ -315,7 +319,7 @@ async def test_paginate_compound_keys_with_extra_filters(ds_client):
],
)
async def test_sortable(ds_client, query_string, sort_key, human_description_en):
path = f"/fixtures/sortable.json?_shape=objects&{query_string}"
path = f"/fixtures/sortable.json?_shape=objects&_extra=human_description_en,next_url&{query_string}"
fetched = []
page = 0
while path:
@ -338,6 +342,7 @@ async def test_sortable_and_filtered(ds_client):
path = (
"/fixtures/sortable.json"
"?content__contains=d&_sort_desc=sortable&_shape=objects"
"&_extra=human_description_en,count"
)
response = await ds_client.get(path)
fetched = response.json()["rows"]
@ -660,7 +665,9 @@ def test_table_filter_extra_where_disabled_if_no_sql_allowed():
async def test_table_through(ds_client):
# Just the museums:
response = await ds_client.get(
'/fixtures/roadside_attractions.json?_shape=arrays&_through={"table":"roadside_attraction_characteristics","column":"characteristic_id","value":"1"}'
"/fixtures/roadside_attractions.json?_shape=arrays"
'&_through={"table":"roadside_attraction_characteristics","column":"characteristic_id","value":"1"}'
"&_extra=human_description_en"
)
assert response.json()["rows"] == [
[
@ -712,6 +719,7 @@ async def test_view(ds_client):
]
@pytest.mark.xfail
@pytest.mark.asyncio
async def test_unit_filters(ds_client):
response = await ds_client.get(
@ -731,7 +739,7 @@ def test_page_size_matching_max_returned_rows(
app_client_returned_rows_matches_page_size,
):
fetched = []
path = "/fixtures/no_primary_key.json"
path = "/fixtures/no_primary_key.json?_extra=next_url"
while path:
response = app_client_returned_rows_matches_page_size.get(path)
fetched.extend(response.json["rows"])
@ -911,12 +919,42 @@ async def test_facets(ds_client, path, expected_facet_results):
response = await ds_client.get(path)
facet_results = response.json()["facet_results"]
# We only compare the querystring portion of the taggle_url
for facet_name, facet_info in facet_results.items():
for facet_name, facet_info in facet_results["results"].items():
assert facet_name == facet_info["name"]
assert False is facet_info["truncated"]
for facet_value in facet_info["results"]:
facet_value["toggle_url"] = facet_value["toggle_url"].split("?")[1]
assert expected_facet_results == facet_results
assert expected_facet_results == facet_results["results"]
@pytest.mark.asyncio
@pytest.mark.skipif(not detect_json1(), reason="requires JSON1 extension")
async def test_facets_array(ds_client):
response = await ds_client.get("/fixtures/facetable.json?_facet_array=tags")
facet_results = response.json()["facet_results"]
assert facet_results["results"]["tags"]["results"] == [
{
"value": "tag1",
"label": "tag1",
"count": 2,
"toggle_url": "http://localhost/fixtures/facetable.json?_facet_array=tags&tags__arraycontains=tag1",
"selected": False,
},
{
"value": "tag2",
"label": "tag2",
"count": 1,
"toggle_url": "http://localhost/fixtures/facetable.json?_facet_array=tags&tags__arraycontains=tag2",
"selected": False,
},
{
"value": "tag3",
"label": "tag3",
"count": 1,
"toggle_url": "http://localhost/fixtures/facetable.json?_facet_array=tags&tags__arraycontains=tag3",
"selected": False,
},
]
@pytest.mark.asyncio
@ -926,58 +964,83 @@ async def test_suggested_facets(ds_client):
"name": suggestion["name"],
"querystring": suggestion["toggle_url"].split("?")[-1],
}
for suggestion in (await ds_client.get("/fixtures/facetable.json")).json()[
"suggested_facets"
]
for suggestion in (
await ds_client.get("/fixtures/facetable.json?_extra=suggested_facets")
).json()["suggested_facets"]
]
expected = [
{"name": "created", "querystring": "_facet=created"},
{"name": "planet_int", "querystring": "_facet=planet_int"},
{"name": "on_earth", "querystring": "_facet=on_earth"},
{"name": "state", "querystring": "_facet=state"},
{"name": "_city_id", "querystring": "_facet=_city_id"},
{"name": "_neighborhood", "querystring": "_facet=_neighborhood"},
{"name": "tags", "querystring": "_facet=tags"},
{"name": "complex_array", "querystring": "_facet=complex_array"},
{"name": "created", "querystring": "_facet_date=created"},
{"name": "created", "querystring": "_extra=suggested_facets&_facet=created"},
{
"name": "planet_int",
"querystring": "_extra=suggested_facets&_facet=planet_int",
},
{"name": "on_earth", "querystring": "_extra=suggested_facets&_facet=on_earth"},
{"name": "state", "querystring": "_extra=suggested_facets&_facet=state"},
{"name": "_city_id", "querystring": "_extra=suggested_facets&_facet=_city_id"},
{
"name": "_neighborhood",
"querystring": "_extra=suggested_facets&_facet=_neighborhood",
},
{"name": "tags", "querystring": "_extra=suggested_facets&_facet=tags"},
{
"name": "complex_array",
"querystring": "_extra=suggested_facets&_facet=complex_array",
},
{
"name": "created",
"querystring": "_extra=suggested_facets&_facet_date=created",
},
]
if detect_json1():
expected.append({"name": "tags", "querystring": "_facet_array=tags"})
expected.append(
{"name": "tags", "querystring": "_extra=suggested_facets&_facet_array=tags"}
)
assert expected == suggestions
def test_allow_facet_off():
with make_app_client(settings={"allow_facet": False}) as client:
assert 400 == client.get("/fixtures/facetable.json?_facet=planet_int").status
assert (
client.get(
"/fixtures/facetable.json?_facet=planet_int&_extra=suggested_facets"
).status
== 400
)
data = client.get("/fixtures/facetable.json?_extra=suggested_facets").json
# Should not suggest any facets either:
assert [] == client.get("/fixtures/facetable.json").json["suggested_facets"]
assert [] == data["suggested_facets"]
def test_suggest_facets_off():
with make_app_client(settings={"suggest_facets": False}) as client:
# Now suggested_facets should be []
assert [] == client.get("/fixtures/facetable.json").json["suggested_facets"]
assert (
[]
== client.get("/fixtures/facetable.json?_extra=suggested_facets").json[
"suggested_facets"
]
)
@pytest.mark.asyncio
@pytest.mark.parametrize("nofacet", (True, False))
async def test_nofacet(ds_client, nofacet):
path = "/fixtures/facetable.json?_facet=state"
path = "/fixtures/facetable.json?_facet=state&_extra=suggested_facets"
if nofacet:
path += "&_nofacet=1"
response = await ds_client.get(path)
if nofacet:
assert response.json()["suggested_facets"] == []
assert response.json()["facet_results"] == {}
assert response.json()["facet_results"]["results"] == {}
else:
assert response.json()["suggested_facets"] != []
assert response.json()["facet_results"] != {}
assert response.json()["facet_results"]["results"] != {}
@pytest.mark.asyncio
@pytest.mark.parametrize("nosuggest", (True, False))
async def test_nosuggest(ds_client, nosuggest):
path = "/fixtures/facetable.json?_facet=state"
path = "/fixtures/facetable.json?_facet=state&_extra=suggested_facets"
if nosuggest:
path += "&_nosuggest=1"
response = await ds_client.get(path)
@ -993,9 +1056,9 @@ async def test_nosuggest(ds_client, nosuggest):
@pytest.mark.asyncio
@pytest.mark.parametrize("nocount,expected_count", ((True, None), (False, 15)))
async def test_nocount(ds_client, nocount, expected_count):
path = "/fixtures/facetable.json"
path = "/fixtures/facetable.json?_extra=count"
if nocount:
path += "?_nocount=1"
path += "&_nocount=1"
response = await ds_client.get(path)
assert response.json()["count"] == expected_count
@ -1280,7 +1343,7 @@ def test_generated_columns_are_visible_in_datasette():
),
)
async def test_col_nocol(ds_client, path, expected_columns):
response = await ds_client.get(path)
response = await ds_client.get(path + "&_extra=columns")
assert response.status_code == 200
columns = response.json()["columns"]
assert columns == expected_columns

Wyświetl plik

@ -1160,6 +1160,13 @@ async def test_table_page_title(ds_client, path, expected):
assert title == expected
@pytest.mark.asyncio
async def test_table_post_method_not_allowed(ds_client):
response = await ds_client.post("/fixtures/facetable")
assert response.status_code == 405
assert "Method not allowed" in response.text
@pytest.mark.parametrize("allow_facet", (True, False))
def test_allow_facet_off(allow_facet):
with make_app_client(settings={"allow_facet": allow_facet}) as client: