2018-05-13 12:58:28 +00:00
|
|
|
import asyncio
|
2018-06-15 06:51:23 +00:00
|
|
|
import csv
|
2020-10-29 22:47:32 +00:00
|
|
|
import hashlib
|
2018-05-13 12:58:28 +00:00
|
|
|
import re
|
2020-12-05 05:21:11 +00:00
|
|
|
import sys
|
2018-05-13 12:58:28 +00:00
|
|
|
import time
|
2018-06-15 06:51:23 +00:00
|
|
|
import urllib
|
2018-05-13 12:58:28 +00:00
|
|
|
|
|
|
|
import pint
|
2019-06-24 03:13:09 +00:00
|
|
|
|
2018-05-13 12:44:22 +00:00
|
|
|
from datasette import __version__
|
2020-10-30 17:54:47 +00:00
|
|
|
from datasette.plugins import pm
|
2020-05-08 16:05:46 +00:00
|
|
|
from datasette.database import QueryInterrupted
|
2018-05-13 12:44:22 +00:00
|
|
|
from datasette.utils import (
|
2020-09-02 22:21:12 +00:00
|
|
|
await_me_maybe,
|
2021-06-01 15:49:50 +00:00
|
|
|
EscapeHtmlWriter,
|
2018-05-13 12:44:22 +00:00
|
|
|
InvalidSql,
|
2018-06-18 03:21:02 +00:00
|
|
|
LimitedWriter,
|
2020-05-28 02:21:41 +00:00
|
|
|
call_with_supported_arguments,
|
2020-10-29 22:47:32 +00:00
|
|
|
path_from_row_pks,
|
2018-05-13 12:44:22 +00:00
|
|
|
path_with_added_args,
|
2019-03-15 05:22:24 +00:00
|
|
|
path_with_removed_args,
|
2018-06-15 06:51:23 +00:00
|
|
|
path_with_format,
|
|
|
|
resolve_table_and_format,
|
2018-08-16 00:58:56 +00:00
|
|
|
sqlite3,
|
2020-10-20 00:33:59 +00:00
|
|
|
HASH_LENGTH,
|
2018-05-13 12:44:22 +00:00
|
|
|
)
|
2019-06-24 03:13:09 +00:00
|
|
|
from datasette.utils.asgi import (
|
|
|
|
AsgiStream,
|
2020-06-07 05:30:36 +00:00
|
|
|
Forbidden,
|
2019-06-24 03:13:09 +00:00
|
|
|
NotFound,
|
|
|
|
Response,
|
2020-10-29 22:01:38 +00:00
|
|
|
BadRequest,
|
2019-06-24 03:13:09 +00:00
|
|
|
)
|
2018-05-13 12:44:22 +00:00
|
|
|
|
|
|
|
# Single shared pint unit registry, used for unit-conversion support in queries.
ureg = pint.UnitRegistry()
|
|
|
|
|
|
|
|
|
|
|
|
class DatasetteError(Exception):
|
2019-05-04 02:15:14 +00:00
|
|
|
def __init__(
|
|
|
|
self,
|
|
|
|
message,
|
|
|
|
title=None,
|
|
|
|
error_dict=None,
|
|
|
|
status=500,
|
|
|
|
template=None,
|
2020-11-30 21:24:23 +00:00
|
|
|
message_is_html=False,
|
2019-05-04 02:15:14 +00:00
|
|
|
):
|
2018-05-13 12:44:22 +00:00
|
|
|
self.message = message
|
|
|
|
self.title = title
|
|
|
|
self.error_dict = error_dict or {}
|
|
|
|
self.status = status
|
2020-11-30 21:24:23 +00:00
|
|
|
self.message_is_html = message_is_html
|
2018-05-13 12:44:22 +00:00
|
|
|
|
|
|
|
|
2020-06-28 23:06:30 +00:00
|
|
|
class BaseView:
    """Base class for all Datasette views.

    Subclasses implement HTTP verb methods (``get``, ``post``, ...);
    ``dispatch_request`` routes an incoming request to the matching method.
    Use ``as_view`` to produce an ASGI-router-compatible callable.
    """

    # The Datasette application instance; assigned in __init__.
    ds = None

    def __init__(self, datasette):
        self.ds = datasette

    async def head(self, *args, **kwargs):
        # HEAD is served by running GET and stripping the body, so headers
        # (content-type, length-related plugins etc.) stay consistent.
        response = await self.get(*args, **kwargs)
        response.body = b""
        return response

    async def check_permission(self, request, action, resource=None):
        """Raise Forbidden unless the actor may perform ``action`` on ``resource``.

        Unlike check_permissions() this defaults to *allowed* when no plugin
        expresses an opinion (``default=True``).
        """
        ok = await self.ds.permission_allowed(
            request.actor,
            action,
            resource=resource,
            default=True,
        )
        if not ok:
            raise Forbidden(action)

    async def check_permissions(self, request, permissions):
        """permissions is a list of (action, resource) tuples or 'action' strings

        Checks each permission in order; the first explicit opinion wins:
        an allow returns, a deny raises Forbidden. A ``None`` result
        (no opinion) falls through to the next permission in the list.
        """
        for permission in permissions:
            if isinstance(permission, str):
                action = permission
                resource = None
            elif isinstance(permission, (tuple, list)) and len(permission) == 2:
                action, resource = permission
            else:
                assert (
                    False
                ), "permission should be string or tuple of two items: {}".format(
                    repr(permission)
                )
            ok = await self.ds.permission_allowed(
                request.actor,
                action,
                resource=resource,
                default=None,
            )
            if ok is not None:
                if ok:
                    return
                else:
                    raise Forbidden(action)

    def database_color(self, database):
        # Placeholder: every database currently gets the same color.
        return "ff0000"

    async def options(self, request, *args, **kwargs):
        return Response.text("Method not allowed", status=405)

    async def put(self, request, *args, **kwargs):
        return Response.text("Method not allowed", status=405)

    async def patch(self, request, *args, **kwargs):
        return Response.text("Method not allowed", status=405)

    async def delete(self, request, *args, **kwargs):
        return Response.text("Method not allowed", status=405)

    async def dispatch_request(self, request, *args, **kwargs):
        """Route the request to the handler method named after its HTTP verb."""
        if self.ds:
            await self.ds.refresh_schemas()
        handler = getattr(self, request.method.lower(), None)
        # Bug fix: previously a verb with no matching handler method crashed
        # with "'NoneType' object is not callable" (a 500); answer with an
        # explicit 405 instead, consistent with the stub verb methods above.
        if handler is None:
            return Response.text("Method not allowed", status=405)
        return await handler(request, *args, **kwargs)

    async def render(self, templates, request, context=None):
        """Select the first matching template and render an HTML response."""
        context = context or {}
        template = self.ds.jinja_env.select_template(templates)
        template_context = {
            **context,
            **{
                "database_color": self.database_color,
                # All candidate templates, the chosen one prefixed with "*".
                "select_templates": [
                    f"{'*' if template_name == template.name else ''}{template_name}"
                    for template_name in templates
                ],
            },
        }
        # NOTE(review): self.name is expected to be defined by subclasses
        # (e.g. DataView.name) - BaseView itself does not set it.
        return Response.html(
            await self.ds.render_template(
                template, template_context, request=request, view_name=self.name
            )
        )

    @classmethod
    def as_view(cls, *class_args, **class_kwargs):
        """Convert this class into an ASGI view function for the router."""

        async def view(request, send):
            # A fresh instance per request keeps view classes stateless.
            self = view.view_class(*class_args, **class_kwargs)
            return await self.dispatch_request(
                request, **request.scope["url_route"]["kwargs"]
            )

        view.view_class = cls
        view.__doc__ = cls.__doc__
        view.__module__ = cls.__module__
        view.__name__ = cls.__name__
        return view
|
|
|
|
|
2018-05-13 12:44:22 +00:00
|
|
|
|
2019-06-15 19:41:34 +00:00
|
|
|
class DataView(BaseView):
    # View name, exposed to templates via view_name; overridden by subclasses.
    name = ""
    # Matches ":param" style named parameters inside SQL query text.
    re_named_parameter = re.compile(":([a-zA-Z0-9_]+)")
|
2018-05-13 12:44:22 +00:00
|
|
|
|
2020-12-03 00:44:03 +00:00
|
|
|
async def options(self, request, *args, **kwargs):
|
2019-06-24 03:13:09 +00:00
|
|
|
r = Response.text("ok")
|
2018-05-13 12:44:22 +00:00
|
|
|
if self.ds.cors:
|
2018-05-13 12:55:15 +00:00
|
|
|
r.headers["Access-Control-Allow-Origin"] = "*"
|
2018-05-13 12:44:22 +00:00
|
|
|
return r
|
|
|
|
|
2019-03-17 22:55:04 +00:00
|
|
|
def redirect(self, request, path, forward_querystring=True, remove_args=None):
|
2018-05-13 12:55:15 +00:00
|
|
|
if request.query_string and "?" not in path and forward_querystring:
|
2020-11-15 23:24:22 +00:00
|
|
|
path = f"{path}?{request.query_string}"
|
2019-03-17 22:55:04 +00:00
|
|
|
if remove_args:
|
|
|
|
path = path_with_removed_args(request, remove_args, path=path)
|
2019-06-24 03:13:09 +00:00
|
|
|
r = Response.redirect(path)
|
2020-11-15 23:24:22 +00:00
|
|
|
r.headers["Link"] = f"<{path}>; rel=preload"
|
2018-05-13 12:44:22 +00:00
|
|
|
if self.ds.cors:
|
2018-05-13 12:55:15 +00:00
|
|
|
r.headers["Access-Control-Allow-Origin"] = "*"
|
2018-05-13 12:44:22 +00:00
|
|
|
return r
|
|
|
|
|
2019-04-13 19:20:10 +00:00
|
|
|
async def data(self, request, database, hash, **kwargs):
|
|
|
|
raise NotImplementedError
|
|
|
|
|
2019-03-31 18:02:22 +00:00
|
|
|
async def resolve_db_name(self, request, db_name, **kwargs):
|
2018-05-13 12:44:22 +00:00
|
|
|
hash = None
|
|
|
|
name = None
|
2021-01-25 05:13:05 +00:00
|
|
|
db_name = urllib.parse.unquote_plus(db_name)
|
2019-10-18 22:51:07 +00:00
|
|
|
if db_name not in self.ds.databases and "-" in db_name:
|
|
|
|
# No matching DB found, maybe it's a name-hash?
|
|
|
|
name_bit, hash_bit = db_name.rsplit("-", 1)
|
|
|
|
if name_bit not in self.ds.databases:
|
2020-11-15 23:24:22 +00:00
|
|
|
raise NotFound(f"Database not found: {name}")
|
2019-10-18 22:51:07 +00:00
|
|
|
else:
|
|
|
|
name = name_bit
|
|
|
|
hash = hash_bit
|
2018-05-13 12:44:22 +00:00
|
|
|
else:
|
|
|
|
name = db_name
|
2021-01-25 05:13:05 +00:00
|
|
|
|
2018-05-13 12:44:22 +00:00
|
|
|
try:
|
2019-03-31 23:51:52 +00:00
|
|
|
db = self.ds.databases[name]
|
2018-05-13 12:44:22 +00:00
|
|
|
except KeyError:
|
2020-11-15 23:24:22 +00:00
|
|
|
raise NotFound(f"Database not found: {name}")
|
2018-05-13 12:55:15 +00:00
|
|
|
|
2019-11-04 23:16:30 +00:00
|
|
|
# Verify the hash
|
2019-03-31 23:51:52 +00:00
|
|
|
expected = "000"
|
|
|
|
if db.hash is not None:
|
|
|
|
expected = db.hash[:HASH_LENGTH]
|
2019-05-04 02:15:14 +00:00
|
|
|
correct_hash_provided = expected == hash
|
2019-03-17 22:55:04 +00:00
|
|
|
|
|
|
|
if not correct_hash_provided:
|
2018-06-15 06:51:23 +00:00
|
|
|
if "table_and_format" in kwargs:
|
2019-05-04 02:15:14 +00:00
|
|
|
|
2019-03-31 18:02:22 +00:00
|
|
|
async def async_table_exists(t):
|
2019-05-27 04:56:43 +00:00
|
|
|
return await db.table_exists(t)
|
2019-05-04 02:15:14 +00:00
|
|
|
|
2019-03-31 18:02:22 +00:00
|
|
|
table, _format = await resolve_table_and_format(
|
2018-06-15 06:51:23 +00:00
|
|
|
table_and_format=urllib.parse.unquote_plus(
|
|
|
|
kwargs["table_and_format"]
|
|
|
|
),
|
2019-05-01 23:01:56 +00:00
|
|
|
table_exists=async_table_exists,
|
2019-05-04 02:15:14 +00:00
|
|
|
allowed_formats=self.ds.renderers.keys(),
|
2018-06-15 06:51:23 +00:00
|
|
|
)
|
|
|
|
kwargs["table"] = table
|
|
|
|
if _format:
|
2020-11-15 23:24:22 +00:00
|
|
|
kwargs["as_format"] = f".{_format}"
|
2019-06-24 03:13:09 +00:00
|
|
|
elif kwargs.get("table"):
|
2019-05-04 02:15:14 +00:00
|
|
|
kwargs["table"] = urllib.parse.unquote_plus(kwargs["table"])
|
2018-07-08 05:21:51 +00:00
|
|
|
|
2020-11-15 23:24:22 +00:00
|
|
|
should_redirect = self.ds.urls.path(f"{name}-{expected}")
|
2019-06-24 03:13:09 +00:00
|
|
|
if kwargs.get("table"):
|
2019-05-04 02:15:14 +00:00
|
|
|
should_redirect += "/" + urllib.parse.quote_plus(kwargs["table"])
|
2019-06-24 03:13:09 +00:00
|
|
|
if kwargs.get("pk_path"):
|
2018-05-13 12:55:15 +00:00
|
|
|
should_redirect += "/" + kwargs["pk_path"]
|
2019-06-24 03:13:09 +00:00
|
|
|
if kwargs.get("as_format"):
|
2018-06-15 06:51:23 +00:00
|
|
|
should_redirect += kwargs["as_format"]
|
2019-06-24 03:13:09 +00:00
|
|
|
if kwargs.get("as_db"):
|
2018-05-13 12:55:15 +00:00
|
|
|
should_redirect += kwargs["as_db"]
|
|
|
|
|
2019-05-16 15:10:25 +00:00
|
|
|
if (
|
2020-11-24 22:06:32 +00:00
|
|
|
(self.ds.setting("hash_urls") or "_hash" in request.args)
|
2019-05-16 15:10:25 +00:00
|
|
|
and
|
|
|
|
# Redirect only if database is immutable
|
|
|
|
not self.ds.databases[name].is_mutable
|
|
|
|
):
|
2019-03-17 22:55:04 +00:00
|
|
|
return name, expected, correct_hash_provided, should_redirect
|
|
|
|
|
|
|
|
return name, expected, correct_hash_provided, None
|
2018-05-13 12:44:22 +00:00
|
|
|
|
|
|
|
def get_templates(self, database, table=None):
|
|
|
|
assert NotImplemented
|
|
|
|
|
|
|
|
async def get(self, request, db_name, **kwargs):
|
2019-10-30 18:49:01 +00:00
|
|
|
(
|
|
|
|
database,
|
|
|
|
hash,
|
|
|
|
correct_hash_provided,
|
|
|
|
should_redirect,
|
|
|
|
) = await self.resolve_db_name(request, db_name, **kwargs)
|
2018-05-13 12:44:22 +00:00
|
|
|
if should_redirect:
|
2019-03-17 22:55:04 +00:00
|
|
|
return self.redirect(request, should_redirect, remove_args={"_hash"})
|
2018-05-13 12:55:15 +00:00
|
|
|
|
2019-05-04 02:15:14 +00:00
|
|
|
return await self.view_get(
|
|
|
|
request, database, hash, correct_hash_provided, **kwargs
|
|
|
|
)
|
2018-05-13 12:44:22 +00:00
|
|
|
|
2018-08-28 10:17:13 +00:00
|
|
|
    async def as_csv(self, request, database, hash, **kwargs):
        """Stream this view's data out as CSV.

        Fetches the first page via self.data(); when ``?_stream=`` is set,
        keeps following "next" tokens and streams every page. Binary cells
        are replaced with blob-download URLs, and expanded foreign-key
        columns get an extra "<col>_label" column. With ``?_trace=`` the CSV
        is wrapped in an HTML debug page instead.
        """
        stream = request.args.get("_stream")
        # Do not calculate facets or counts:
        extra_parameters = [
            "{}=1".format(key)
            for key in ("_nofacet", "_nocount")
            if not request.args.get(key)
        ]
        if extra_parameters:
            # Rewrite the ASGI scope so downstream .data() calls see the
            # extra _nofacet/_nocount parameters.
            if not request.query_string:
                new_query_string = "&".join(extra_parameters)
            else:
                new_query_string = (
                    request.query_string + "&" + "&".join(extra_parameters)
                )
            new_scope = dict(
                request.scope, query_string=new_query_string.encode("latin-1")
            )
            request.scope = new_scope
        if stream:
            # Some quick soundness checks
            if not self.ds.setting("allow_csv_stream"):
                raise BadRequest("CSV streaming is disabled")
            if request.args.get("_next"):
                raise BadRequest("_next not allowed for CSV streaming")
            # Streaming fetches the largest allowed page size per request.
            kwargs["_size"] = "max"
        # Fetch the first page
        try:
            response_or_template_contexts = await self.data(
                request, database, hash, **kwargs
            )
            if isinstance(response_or_template_contexts, Response):
                return response_or_template_contexts
            # A 4-tuple includes an HTTP status code as its last item.
            elif len(response_or_template_contexts) == 4:
                data, _, _, _ = response_or_template_contexts
            else:
                data, _, _ = response_or_template_contexts
        except (sqlite3.OperationalError, InvalidSql) as e:
            raise DatasetteError(str(e), title="Invalid SQL", status=400)

        # NOTE(review): this clause is unreachable - OperationalError is
        # already caught by the tuple clause above.
        except sqlite3.OperationalError as e:
            raise DatasetteError(str(e))

        except DatasetteError:
            raise

        # Convert rows and columns to CSV
        headings = data["columns"]
        # if there are expanded_columns we need to add additional headings
        expanded_columns = set(data.get("expanded_columns") or [])
        if expanded_columns:
            headings = []
            for column in data["columns"]:
                headings.append(column)
                if column in expanded_columns:
                    headings.append(f"{column}_label")

        content_type = "text/plain; charset=utf-8"
        preamble = ""
        postamble = ""

        # ?_trace= wraps the CSV output in an HTML debug page.
        trace = request.args.get("_trace")
        if trace:
            content_type = "text/html; charset=utf-8"
            preamble = (
                "<html><head><title>CSV debug</title></head>"
                '<body><textarea style="width: 90%; height: 70vh">'
            )
            postamble = "</textarea></body></html>"

        async def stream_fn(r):
            # Writes the CSV body to the ASGI response `r`, page by page.
            nonlocal data, trace
            # Cap total output at the max_csv_mb setting.
            limited_writer = LimitedWriter(r, self.ds.setting("max_csv_mb"))
            if trace:
                await limited_writer.write(preamble)
                writer = csv.writer(EscapeHtmlWriter(limited_writer))
            else:
                writer = csv.writer(limited_writer)
            first = True
            # NOTE(review): `next` here shadows the builtin; it holds the
            # pagination token for the following page.
            next = None
            while first or (next and stream):
                try:
                    if next:
                        kwargs["_next"] = next
                    if not first:
                        # Fetch the next page of data.
                        data, _, _ = await self.data(request, database, hash, **kwargs)
                    if first:
                        if request.args.get("_header") != "off":
                            await writer.writerow(headings)
                        first = False
                    next = data.get("next")
                    for row in data["rows"]:
                        if any(isinstance(r, bytes) for r in row):
                            # Replace binary cells with blob-download URLs.
                            new_row = []
                            for column, cell in zip(headings, row):
                                if isinstance(cell, bytes):
                                    # If this is a table page, use .urls.row_blob()
                                    if data.get("table"):
                                        pks = data.get("primary_keys") or []
                                        cell = self.ds.absolute_url(
                                            request,
                                            self.ds.urls.row_blob(
                                                database,
                                                data["table"],
                                                path_from_row_pks(row, pks, not pks),
                                                column,
                                            ),
                                        )
                                    else:
                                        # Otherwise generate URL for this query
                                        url = self.ds.absolute_url(
                                            request,
                                            path_with_format(
                                                request=request,
                                                format="blob",
                                                extra_qs={
                                                    "_blob_column": column,
                                                    # Hash lets the blob view
                                                    # verify the cell content.
                                                    "_blob_hash": hashlib.sha256(
                                                        cell
                                                    ).hexdigest(),
                                                },
                                                replace_format="csv",
                                            ),
                                        )
                                        # Strip the parameters this view added.
                                        cell = url.replace("&_nocount=1", "").replace(
                                            "&_nofacet=1", ""
                                        )
                                new_row.append(cell)
                            row = new_row
                        if not expanded_columns:
                            # Simple path
                            await writer.writerow(row)
                        else:
                            # Look for {"value": "label": } dicts and expand
                            new_row = []
                            for heading, cell in zip(data["columns"], row):
                                if heading in expanded_columns:
                                    if cell is None:
                                        new_row.extend(("", ""))
                                    else:
                                        assert isinstance(cell, dict)
                                        new_row.append(cell["value"])
                                        new_row.append(cell["label"])
                                else:
                                    new_row.append(cell)
                            await writer.writerow(new_row)
                except Exception as e:
                    # Mid-stream errors cannot change the status code; log
                    # and emit the error text into the body, then stop.
                    sys.stderr.write("Caught this error: {}\n".format(e))
                    sys.stderr.flush()
                    await r.write(str(e))
                    return
            await limited_writer.write(postamble)

        headers = {}
        if self.ds.cors:
            headers["Access-Control-Allow-Origin"] = "*"
        if request.args.get("_dl", None):
            # ?_dl= forces a download; use the real CSV content type
            # (unless tracing, which stays HTML).
            if not trace:
                content_type = "text/csv; charset=utf-8"
            disposition = 'attachment; filename="{}.csv"'.format(
                kwargs.get("table", database)
            )
            headers["content-disposition"] = disposition

        return AsgiStream(stream_fn, headers=headers, content_type=content_type)
|
2018-06-15 06:51:23 +00:00
|
|
|
|
2019-05-01 23:01:56 +00:00
|
|
|
async def get_format(self, request, database, args):
|
2020-09-02 22:24:55 +00:00
|
|
|
"""Determine the format of the response from the request, from URL
|
|
|
|
parameters or from a file extension.
|
2019-05-01 23:01:56 +00:00
|
|
|
|
2020-09-02 22:24:55 +00:00
|
|
|
`args` is a dict of the path components parsed from the URL by the router.
|
2019-05-01 23:01:56 +00:00
|
|
|
"""
|
2018-06-15 06:51:23 +00:00
|
|
|
# If ?_format= is provided, use that as the format
|
|
|
|
_format = request.args.get("_format", None)
|
|
|
|
if not _format:
|
2019-05-01 23:01:56 +00:00
|
|
|
_format = (args.pop("as_format", None) or "").lstrip(".")
|
2019-10-21 02:03:08 +00:00
|
|
|
else:
|
|
|
|
args.pop("as_format", None)
|
2019-05-01 23:01:56 +00:00
|
|
|
if "table_and_format" in args:
|
2019-05-27 04:56:43 +00:00
|
|
|
db = self.ds.databases[database]
|
2019-05-04 02:15:14 +00:00
|
|
|
|
2019-03-31 18:02:22 +00:00
|
|
|
async def async_table_exists(t):
|
2019-05-27 04:56:43 +00:00
|
|
|
return await db.table_exists(t)
|
2019-05-04 02:15:14 +00:00
|
|
|
|
2019-03-31 18:02:22 +00:00
|
|
|
table, _ext_format = await resolve_table_and_format(
|
2019-05-04 02:15:14 +00:00
|
|
|
table_and_format=urllib.parse.unquote_plus(args["table_and_format"]),
|
2019-05-01 23:01:56 +00:00
|
|
|
table_exists=async_table_exists,
|
2019-05-04 02:15:14 +00:00
|
|
|
allowed_formats=self.ds.renderers.keys(),
|
2018-06-15 06:51:23 +00:00
|
|
|
)
|
|
|
|
_format = _format or _ext_format
|
2019-05-01 23:01:56 +00:00
|
|
|
args["table"] = table
|
|
|
|
del args["table_and_format"]
|
|
|
|
elif "table" in args:
|
2019-05-04 02:15:14 +00:00
|
|
|
args["table"] = urllib.parse.unquote_plus(args["table"])
|
2019-05-01 23:01:56 +00:00
|
|
|
return _format, args
|
|
|
|
|
|
|
|
async def view_get(self, request, database, hash, correct_hash_provided, **kwargs):
|
|
|
|
_format, kwargs = await self.get_format(request, database, kwargs)
|
2018-06-15 06:51:23 +00:00
|
|
|
|
|
|
|
if _format == "csv":
|
2018-08-28 10:17:13 +00:00
|
|
|
return await self.as_csv(request, database, hash, **kwargs)
|
2018-06-15 06:51:23 +00:00
|
|
|
|
?_labels= and ?_label=COL to expand foreign keys in JSON/CSV
These new querystring arguments can be used to request expanded foreign keys
in both JSON and CSV formats.
?_labels=on turns on expansions for ALL foreign key columns
?_label=COLUMN1&_label=COLUMN2 can be used to pick specific columns to expand
e.g. `Street_Tree_List.json?_label=qSpecies&_label=qLegalStatus`
{
"rowid": 233,
"TreeID": 121240,
"qLegalStatus": {
"value" 2,
"label": "Private"
}
"qSpecies": {
"value": 16,
"label": "Sycamore"
}
"qAddress": "91 Commonwealth Ave",
...
}
The labels option also works for the HTML and CSV views.
HTML defaults to `?_labels=on`, so if you pass `?_labels=off` you can disable
foreign key expansion entirely - or you can use `?_label=COLUMN` to request
just specific columns.
If you expand labels on CSV you get additional columns in the output:
`/Street_Tree_List.csv?_label=qLegalStatus`
rowid,TreeID,qLegalStatus,qLegalStatus_label...
1,141565,1,Permitted Site...
2,232565,2,Undocumented...
I also refactored the existing foreign key expansion code.
Closes #233. Refs #266.
2018-06-16 22:18:57 +00:00
|
|
|
if _format is None:
|
2019-07-26 10:25:44 +00:00
|
|
|
# HTML views default to expanding all foreign key labels
|
2019-05-04 02:15:14 +00:00
|
|
|
kwargs["default_labels"] = True
|
?_labels= and ?_label=COL to expand foreign keys in JSON/CSV
These new querystring arguments can be used to request expanded foreign keys
in both JSON and CSV formats.
?_labels=on turns on expansions for ALL foreign key columns
?_label=COLUMN1&_label=COLUMN2 can be used to pick specific columns to expand
e.g. `Street_Tree_List.json?_label=qSpecies&_label=qLegalStatus`
{
"rowid": 233,
"TreeID": 121240,
"qLegalStatus": {
"value" 2,
"label": "Private"
}
"qSpecies": {
"value": 16,
"label": "Sycamore"
}
"qAddress": "91 Commonwealth Ave",
...
}
The labels option also works for the HTML and CSV views.
HTML defaults to `?_labels=on`, so if you pass `?_labels=off` you can disable
foreign key expansion entirely - or you can use `?_label=COLUMN` to request
just specific columns.
If you expand labels on CSV you get additional columns in the output:
`/Street_Tree_List.csv?_label=qLegalStatus`
rowid,TreeID,qLegalStatus,qLegalStatus_label...
1,141565,1,Permitted Site...
2,232565,2,Undocumented...
I also refactored the existing foreign key expansion code.
Closes #233. Refs #266.
2018-06-16 22:18:57 +00:00
|
|
|
|
2018-05-13 12:44:22 +00:00
|
|
|
extra_template_data = {}
|
2020-12-21 21:49:14 +00:00
|
|
|
start = time.perf_counter()
|
2021-06-02 03:46:20 +00:00
|
|
|
status_code = None
|
2018-05-13 12:44:22 +00:00
|
|
|
templates = []
|
|
|
|
try:
|
|
|
|
response_or_template_contexts = await self.data(
|
2018-08-28 10:17:13 +00:00
|
|
|
request, database, hash, **kwargs
|
2018-05-13 12:44:22 +00:00
|
|
|
)
|
2019-06-24 03:13:09 +00:00
|
|
|
if isinstance(response_or_template_contexts, Response):
|
2018-05-13 12:44:22 +00:00
|
|
|
return response_or_template_contexts
|
2021-06-02 03:46:20 +00:00
|
|
|
# If it has four items, it includes an HTTP status code
|
|
|
|
if len(response_or_template_contexts) == 4:
|
|
|
|
(
|
|
|
|
data,
|
|
|
|
extra_template_data,
|
|
|
|
templates,
|
|
|
|
status_code,
|
|
|
|
) = response_or_template_contexts
|
2018-05-13 12:44:22 +00:00
|
|
|
else:
|
|
|
|
data, extra_template_data, templates = response_or_template_contexts
|
2019-05-28 00:16:36 +00:00
|
|
|
except QueryInterrupted:
|
2019-05-04 02:15:14 +00:00
|
|
|
raise DatasetteError(
|
|
|
|
"""
|
2018-05-28 21:24:19 +00:00
|
|
|
SQL query took too long. The time limit is controlled by the
|
2020-08-15 23:57:05 +00:00
|
|
|
<a href="https://docs.datasette.io/en/stable/config.html#sql-time-limit-ms">sql_time_limit_ms</a>
|
2018-05-28 21:24:19 +00:00
|
|
|
configuration option.
|
2019-05-04 02:15:14 +00:00
|
|
|
""",
|
|
|
|
title="SQL Interrupted",
|
|
|
|
status=400,
|
2020-11-30 21:24:23 +00:00
|
|
|
message_is_html=True,
|
2019-05-04 02:15:14 +00:00
|
|
|
)
|
2018-05-13 12:44:22 +00:00
|
|
|
except (sqlite3.OperationalError, InvalidSql) as e:
|
2018-05-13 12:55:15 +00:00
|
|
|
raise DatasetteError(str(e), title="Invalid SQL", status=400)
|
|
|
|
|
2020-12-23 17:04:32 +00:00
|
|
|
except sqlite3.OperationalError as e:
|
2018-05-13 12:44:22 +00:00
|
|
|
raise DatasetteError(str(e))
|
2018-05-13 12:55:15 +00:00
|
|
|
|
2018-05-13 12:44:22 +00:00
|
|
|
except DatasetteError:
|
|
|
|
raise
|
2018-05-13 12:55:15 +00:00
|
|
|
|
2020-12-21 21:49:14 +00:00
|
|
|
end = time.perf_counter()
|
2018-05-13 12:55:15 +00:00
|
|
|
data["query_ms"] = (end - start) * 1000
|
|
|
|
for key in ("source", "source_url", "license", "license_url"):
|
2018-08-13 14:56:50 +00:00
|
|
|
value = self.ds.metadata(key)
|
2018-05-13 12:44:22 +00:00
|
|
|
if value:
|
|
|
|
data[key] = value
|
2019-05-01 23:01:56 +00:00
|
|
|
|
|
|
|
# Special case for .jsono extension - redirect to _shape=objects
|
|
|
|
if _format == "jsono":
|
|
|
|
return self.redirect(
|
|
|
|
request,
|
|
|
|
path_with_added_args(
|
2018-05-13 12:44:22 +00:00
|
|
|
request,
|
2019-05-01 23:01:56 +00:00
|
|
|
{"_shape": "objects"},
|
|
|
|
path=request.path.rsplit(".jsono", 1)[0] + ".json",
|
|
|
|
),
|
|
|
|
forward_querystring=False,
|
|
|
|
)
|
2018-05-13 12:55:15 +00:00
|
|
|
|
2019-05-01 23:01:56 +00:00
|
|
|
if _format in self.ds.renderers.keys():
|
|
|
|
# Dispatch request to the correct output format renderer
|
|
|
|
# (CSV is not handled here due to streaming)
|
2020-05-28 02:21:41 +00:00
|
|
|
result = call_with_supported_arguments(
|
2020-05-28 05:57:05 +00:00
|
|
|
self.ds.renderers[_format][0],
|
2020-05-28 02:21:41 +00:00
|
|
|
datasette=self.ds,
|
|
|
|
columns=data.get("columns") or [],
|
|
|
|
rows=data.get("rows") or [],
|
|
|
|
sql=data.get("query", {}).get("sql", None),
|
|
|
|
query_name=data.get("query_name"),
|
|
|
|
database=database,
|
|
|
|
table=data.get("table"),
|
|
|
|
request=request,
|
|
|
|
view_name=self.name,
|
|
|
|
# These will be deprecated in Datasette 1.0:
|
|
|
|
args=request.args,
|
|
|
|
data=data,
|
|
|
|
)
|
2020-05-28 02:43:30 +00:00
|
|
|
if asyncio.iscoroutine(result):
|
|
|
|
result = await result
|
2019-05-01 23:01:56 +00:00
|
|
|
if result is None:
|
|
|
|
raise NotFound("No data")
|
2020-08-28 04:02:50 +00:00
|
|
|
if isinstance(result, dict):
|
|
|
|
r = Response(
|
|
|
|
body=result.get("body"),
|
2021-06-02 03:46:20 +00:00
|
|
|
status=result.get("status_code", status_code or 200),
|
2020-08-28 04:02:50 +00:00
|
|
|
content_type=result.get("content_type", "text/plain"),
|
|
|
|
headers=result.get("headers"),
|
|
|
|
)
|
|
|
|
elif isinstance(result, Response):
|
|
|
|
r = result
|
2021-06-02 03:46:20 +00:00
|
|
|
if status_code is not None:
|
|
|
|
# Over-ride the status code
|
|
|
|
r.status = status_code
|
2020-08-28 04:02:50 +00:00
|
|
|
else:
|
2020-11-15 23:24:22 +00:00
|
|
|
assert False, f"{result} should be dict or Response"
|
2018-05-13 12:44:22 +00:00
|
|
|
else:
|
|
|
|
extras = {}
|
|
|
|
if callable(extra_template_data):
|
|
|
|
extras = extra_template_data()
|
|
|
|
if asyncio.iscoroutine(extras):
|
|
|
|
extras = await extras
|
|
|
|
else:
|
|
|
|
extras = extra_template_data
|
2018-06-17 22:56:55 +00:00
|
|
|
url_labels_extra = {}
|
|
|
|
if data.get("expandable_columns"):
|
|
|
|
url_labels_extra = {"_labels": "on"}
|
2019-05-01 23:01:56 +00:00
|
|
|
|
2020-05-28 05:57:05 +00:00
|
|
|
renderers = {}
|
|
|
|
for key, (_, can_render) in self.ds.renderers.items():
|
|
|
|
it_can_render = call_with_supported_arguments(
|
|
|
|
can_render,
|
|
|
|
datasette=self.ds,
|
|
|
|
columns=data.get("columns") or [],
|
|
|
|
rows=data.get("rows") or [],
|
|
|
|
sql=data.get("query", {}).get("sql", None),
|
|
|
|
query_name=data.get("query_name"),
|
|
|
|
database=database,
|
|
|
|
table=data.get("table"),
|
|
|
|
request=request,
|
|
|
|
view_name=self.name,
|
|
|
|
)
|
2020-09-02 22:21:12 +00:00
|
|
|
it_can_render = await await_me_maybe(it_can_render)
|
2020-05-28 05:57:05 +00:00
|
|
|
if it_can_render:
|
|
|
|
renderers[key] = path_with_format(
|
2020-10-31 18:16:28 +00:00
|
|
|
request=request, format=key, extra_qs={**url_labels_extra}
|
2020-05-28 05:57:05 +00:00
|
|
|
)
|
|
|
|
|
2019-05-04 02:15:14 +00:00
|
|
|
url_csv_args = {"_size": "max", **url_labels_extra}
|
2020-10-31 18:16:28 +00:00
|
|
|
url_csv = path_with_format(
|
|
|
|
request=request, format="csv", extra_qs=url_csv_args
|
|
|
|
)
|
2019-05-04 02:15:14 +00:00
|
|
|
url_csv_path = url_csv.split("?")[0]
|
2018-05-13 12:44:22 +00:00
|
|
|
context = {
|
|
|
|
**data,
|
|
|
|
**extras,
|
|
|
|
**{
|
2019-05-01 23:01:56 +00:00
|
|
|
"renderers": renderers,
|
2018-06-18 06:03:22 +00:00
|
|
|
"url_csv": url_csv,
|
|
|
|
"url_csv_path": url_csv_path,
|
2019-01-03 02:43:56 +00:00
|
|
|
"url_csv_hidden_args": [
|
|
|
|
(key, value)
|
|
|
|
for key, value in urllib.parse.parse_qsl(request.query_string)
|
|
|
|
if key not in ("_labels", "_facet", "_size")
|
2019-05-04 02:15:14 +00:00
|
|
|
]
|
|
|
|
+ [("_size", "max")],
|
2018-05-13 12:55:15 +00:00
|
|
|
"datasette_version": __version__,
|
2018-08-11 20:06:45 +00:00
|
|
|
"config": self.ds.config_dict(),
|
2019-05-04 02:15:14 +00:00
|
|
|
},
|
2018-05-13 12:44:22 +00:00
|
|
|
}
|
2018-05-13 12:55:15 +00:00
|
|
|
if "metadata" not in context:
|
|
|
|
context["metadata"] = self.ds.metadata
|
2019-07-06 00:05:56 +00:00
|
|
|
r = await self.render(templates, request=request, context=context)
|
2021-06-02 03:46:20 +00:00
|
|
|
if status_code is not None:
|
|
|
|
r.status = status_code
|
2019-05-01 23:01:56 +00:00
|
|
|
|
|
|
|
ttl = request.args.get("_ttl", None)
|
|
|
|
if ttl is None or not ttl.isdigit():
|
|
|
|
if correct_hash_provided:
|
2020-11-24 22:06:32 +00:00
|
|
|
ttl = self.ds.setting("default_cache_ttl_hashed")
|
2018-05-26 22:17:33 +00:00
|
|
|
else:
|
2020-11-24 22:06:32 +00:00
|
|
|
ttl = self.ds.setting("default_cache_ttl")
|
2019-05-01 23:01:56 +00:00
|
|
|
|
|
|
|
return self.set_response_headers(r, ttl)
|
|
|
|
|
|
|
|
def set_response_headers(self, response, ttl):
    """Apply cache, referrer and CORS headers to *response*, then return it.

    ``ttl`` (seconds, int or numeric string) controls the Cache-Control
    header; a value of 0 disables caching entirely.
    """
    # Cache headers are only emitted for successful responses, and only
    # when the Datasette instance has cache_headers enabled.
    if self.ds.cache_headers and response.status == 200:
        seconds = int(ttl)
        cache_control = "no-cache" if seconds == 0 else f"max-age={seconds}"
        response.headers["Cache-Control"] = cache_control
    # Never leak the referring URL to external destinations
    response.headers["Referrer-Policy"] = "no-referrer"
    if self.ds.cors:
        # Allow cross-origin reads from any domain
        response.headers["Access-Control-Allow-Origin"] = "*"
    return response
|