2018-05-25 00:15:37 +00:00
|
|
|
import asyncio
|
2018-07-10 15:13:27 +00:00
|
|
|
import click
|
2018-05-20 17:01:49 +00:00
|
|
|
import collections
|
2018-05-13 12:58:28 +00:00
|
|
|
import hashlib
|
2018-04-18 03:12:21 +00:00
|
|
|
import itertools
|
2018-05-13 12:58:28 +00:00
|
|
|
import os
|
|
|
|
import sqlite3
|
2018-05-02 08:46:54 +00:00
|
|
|
import sys
|
2018-05-25 00:15:37 +00:00
|
|
|
import threading
|
2018-04-14 13:17:20 +00:00
|
|
|
import traceback
|
2018-05-13 12:58:28 +00:00
|
|
|
import urllib.parse
|
|
|
|
from concurrent import futures
|
|
|
|
from pathlib import Path
|
|
|
|
|
2018-05-28 21:24:19 +00:00
|
|
|
from markupsafe import Markup
|
2018-05-13 12:58:28 +00:00
|
|
|
from jinja2 import ChoiceLoader, Environment, FileSystemLoader, PrefixLoader
|
|
|
|
from sanic import Sanic, response
|
|
|
|
from sanic.exceptions import InvalidUsage, NotFound
|
|
|
|
|
2018-05-21 08:02:34 +00:00
|
|
|
from .views.base import (
|
2018-05-13 12:58:28 +00:00
|
|
|
DatasetteError,
|
|
|
|
ureg
|
|
|
|
)
|
2018-05-21 08:02:34 +00:00
|
|
|
from .views.database import DatabaseDownload, DatabaseView
|
|
|
|
from .views.index import IndexView
|
2018-06-07 15:22:29 +00:00
|
|
|
from .views.special import JsonDataView
|
2018-05-21 08:02:34 +00:00
|
|
|
from .views.table import RowView, TableView
|
2018-05-13 12:58:28 +00:00
|
|
|
|
2017-11-10 19:25:54 +00:00
|
|
|
from .utils import (
|
2018-05-25 00:15:37 +00:00
|
|
|
InterruptedError,
|
|
|
|
Results,
|
2017-11-11 05:55:50 +00:00
|
|
|
escape_css_string,
|
2018-04-03 13:39:50 +00:00
|
|
|
escape_sqlite,
|
2018-04-19 05:24:48 +00:00
|
|
|
get_plugins,
|
2018-04-16 05:22:01 +00:00
|
|
|
module_from_path,
|
2018-05-25 00:15:37 +00:00
|
|
|
sqlite_timelimit,
|
2018-05-13 12:58:28 +00:00
|
|
|
to_css_class
|
2017-11-10 19:25:54 +00:00
|
|
|
)
|
2018-05-21 08:02:34 +00:00
|
|
|
from .inspect import inspect_hash, inspect_views, inspect_tables
|
2018-08-05 00:14:56 +00:00
|
|
|
from .plugins import pm
|
2017-11-16 15:20:54 +00:00
|
|
|
from .version import __version__
|
2017-10-23 16:02:40 +00:00
|
|
|
|
2017-10-27 07:08:24 +00:00
|
|
|
# Root of the repository checkout (two levels above this module);
# used to locate bundled templates and static assets.
app_root = Path(__file__).parent.parent

# Per-thread store of SQLite connections, one attribute per database name.
# Each executor thread lazily opens its own connection (see Datasette.execute),
# since SQLite connections should not be shared across threads.
connections = threading.local()
|
2017-10-23 16:02:40 +00:00
|
|
|
|
2018-04-16 00:56:15 +00:00
|
|
|
|
2018-05-20 17:01:49 +00:00
|
|
|
# A single configurable setting: its name, default value, and help text
# (help is shown by the CLI and used to build documentation).
ConfigOption = collections.namedtuple(
    "ConfigOption", ("name", "default", "help")
)

# All recognised config options. DEFAULT_CONFIG below is derived from this,
# and config_dict() resolves current values against it.
CONFIG_OPTIONS = (
    ConfigOption("default_page_size", 100, """
        Default page size for the table view
    """.strip()),
    ConfigOption("max_returned_rows", 1000, """
        Maximum rows that can be returned from a table or custom query
    """.strip()),
    ConfigOption("num_sql_threads", 3, """
        Number of threads in the thread pool for executing SQLite queries
    """.strip()),
    ConfigOption("sql_time_limit_ms", 1000, """
        Time limit for a SQL query in milliseconds
    """.strip()),
    ConfigOption("default_facet_size", 30, """
        Number of values to return for requested facets
    """.strip()),
    ConfigOption("facet_time_limit_ms", 200, """
        Time limit for calculating a requested facet
    """.strip()),
    ConfigOption("facet_suggest_time_limit_ms", 50, """
        Time limit for calculating a suggested facet
    """.strip()),
    ConfigOption("allow_facet", True, """
        Allow users to specify columns to facet using ?_facet= parameter
    """.strip()),
    ConfigOption("allow_download", True, """
        Allow users to download the original SQLite database files
    """.strip()),
    ConfigOption("suggest_facets", True, """
        Calculate and display suggested facets
    """.strip()),
    ConfigOption("allow_sql", True, """
        Allow arbitrary SQL queries via ?sql= parameter
    """.strip()),
    # One year, expressed in seconds
    ConfigOption("default_cache_ttl", 365 * 24 * 60 * 60, """
        Default HTTP cache TTL (used in Cache-Control: max-age= header)
    """.strip()),
    ConfigOption("cache_size_kb", 0, """
        SQLite cache size in KB (0 == use SQLite default)
    """.strip()),
    ConfigOption("allow_csv_stream", True, """
        Allow .csv?_stream=1 to download all rows (ignoring max_returned_rows)
    """.strip()),
    ConfigOption("max_csv_mb", 100, """
        Maximum size allowed for CSV export in MB - set 0 to disable this limit
    """.strip()),
    ConfigOption("truncate_cells_html", 2048, """
        Truncate cells longer than this in HTML table view - set 0 to disable
    """.strip()),
    ConfigOption("force_https_urls", False, """
        Force URLs in API output to always use https:// protocol
    """.strip()),
)

# Mapping of option name -> default value, merged with user-provided
# overrides in Datasette.__init__.
DEFAULT_CONFIG = {
    option.name: option.default
    for option in CONFIG_OPTIONS
}
|
|
|
|
|
|
|
|
|
2017-10-24 02:00:37 +00:00
|
|
|
async def favicon(request):
    """Serve an empty body for the browser's automatic /favicon.ico request."""
    return response.text("")
|
2017-10-24 02:00:37 +00:00
|
|
|
|
|
|
|
|
2017-11-10 19:05:57 +00:00
|
|
|
class Datasette:
|
2018-05-13 12:55:15 +00:00
|
|
|
|
2017-11-13 19:33:01 +00:00
|
|
|
    def __init__(
        self,
        files,
        cache_headers=True,
        cors=False,
        inspect_data=None,
        metadata=None,
        sqlite_extensions=None,
        template_dir=None,
        plugins_dir=None,
        static_mounts=None,
        config=None,
        version_note=None,
    ):
        """Configure a Datasette instance.

        :param files: list of paths to SQLite database files to serve
        :param cache_headers: if True, responses include far-future cache headers
        :param cors: if True, responses include CORS headers
        :param inspect_data: pre-computed inspect() data (skips re-inspection)
        :param metadata: metadata.json contents as a dict
        :param sqlite_extensions: SQLite extension modules to load per connection
        :param template_dir: directory of custom templates
        :param plugins_dir: directory of one-off plugin modules to register
        :param static_mounts: list of (url_path, directory) static mounts
        :param config: dict of config overrides, merged over DEFAULT_CONFIG
        :param version_note: extra string exposed in /-/versions.json
        """
        self.files = files
        self.cache_headers = cache_headers
        self.cors = cors
        # Cached inspect() result; populated lazily if not provided here
        self._inspect = inspect_data
        self.metadata = metadata or {}
        # (name, num_args, func) tuples registered on every connection
        self.sqlite_functions = []
        self.sqlite_extensions = sqlite_extensions or []
        self.template_dir = template_dir
        self.plugins_dir = plugins_dir
        self.static_mounts = static_mounts or []
        # User-supplied config overrides win over the defaults
        self._config = dict(DEFAULT_CONFIG, **(config or {}))
        self.version_note = version_note
        # Thread pool used by execute() to run SQLite queries off the event loop
        self.executor = futures.ThreadPoolExecutor(
            max_workers=self.config("num_sql_threads")
        )
        # Frequently-used config values are cached as attributes
        self.max_returned_rows = self.config("max_returned_rows")
        self.sql_time_limit_ms = self.config("sql_time_limit_ms")
        self.page_size = self.config("default_page_size")
        # Execute plugins in constructor, to ensure they are available
        # when the rest of `datasette inspect` executes
        if self.plugins_dir:
            for filename in os.listdir(self.plugins_dir):
                filepath = os.path.join(self.plugins_dir, filename)
                mod = module_from_path(filepath, name=filename)
                try:
                    pm.register(mod)
                except ValueError:
                    # Plugin already registered
                    pass
|
2017-11-13 15:20:02 +00:00
|
|
|
|
2018-08-11 20:06:45 +00:00
|
|
|
def config(self, key):
|
|
|
|
return self._config.get(key, None)
|
|
|
|
|
|
|
|
def config_dict(self):
|
|
|
|
# Returns a fully resolved config dictionary, useful for templates
|
|
|
|
return {
|
|
|
|
option.name: self.config(option.name)
|
|
|
|
for option in CONFIG_OPTIONS
|
|
|
|
}
|
|
|
|
|
2017-12-09 03:10:09 +00:00
|
|
|
def app_css_hash(self):
|
2018-05-13 12:55:15 +00:00
|
|
|
if not hasattr(self, "_app_css_hash"):
|
2017-12-09 03:10:09 +00:00
|
|
|
self._app_css_hash = hashlib.sha1(
|
2018-05-13 12:55:15 +00:00
|
|
|
open(
|
|
|
|
os.path.join(str(app_root), "datasette/static/app.css")
|
|
|
|
).read().encode(
|
|
|
|
"utf8"
|
|
|
|
)
|
|
|
|
).hexdigest()[
|
|
|
|
:6
|
|
|
|
]
|
2017-12-09 03:10:09 +00:00
|
|
|
return self._app_css_hash
|
|
|
|
|
2018-07-16 02:33:30 +00:00
|
|
|
def get_canned_queries(self, database_name):
|
|
|
|
names = self.metadata.get("databases", {}).get(database_name, {}).get(
|
|
|
|
"queries", {}
|
|
|
|
).keys()
|
|
|
|
return [
|
|
|
|
self.get_canned_query(database_name, name) for name in names
|
|
|
|
]
|
|
|
|
|
2017-12-05 16:17:02 +00:00
|
|
|
def get_canned_query(self, database_name, query_name):
|
2018-05-13 12:55:15 +00:00
|
|
|
query = self.metadata.get("databases", {}).get(database_name, {}).get(
|
|
|
|
"queries", {}
|
2017-12-05 16:17:02 +00:00
|
|
|
).get(
|
2018-05-13 12:55:15 +00:00
|
|
|
query_name
|
|
|
|
)
|
2017-12-05 16:17:02 +00:00
|
|
|
if query:
|
2018-07-16 02:33:30 +00:00
|
|
|
if not isinstance(query, dict):
|
|
|
|
query = {"sql": query}
|
|
|
|
query["name"] = query_name
|
|
|
|
return query
|
2017-12-05 16:17:02 +00:00
|
|
|
|
2018-06-16 17:33:17 +00:00
|
|
|
async def get_table_definition(self, database_name, table, type_="table"):
|
|
|
|
table_definition_rows = list(
|
|
|
|
await self.execute(
|
|
|
|
database_name,
|
|
|
|
'select sql from sqlite_master where name = :n and type=:t',
|
|
|
|
{"n": table, "t": type_},
|
|
|
|
)
|
|
|
|
)
|
|
|
|
if not table_definition_rows:
|
|
|
|
return None
|
|
|
|
return table_definition_rows[0][0]
|
|
|
|
|
|
|
|
def get_view_definition(self, database_name, view):
|
|
|
|
return self.get_table_definition(database_name, view, 'view')
|
|
|
|
|
2017-11-29 02:38:15 +00:00
|
|
|
def asset_urls(self, key):
|
2018-04-18 03:12:21 +00:00
|
|
|
# Flatten list-of-lists from plugins:
|
2018-05-27 08:45:03 +00:00
|
|
|
seen_urls = set()
|
|
|
|
for url_or_dict in itertools.chain(
|
|
|
|
itertools.chain.from_iterable(getattr(pm.hook, key)()),
|
|
|
|
(self.metadata.get(key) or [])
|
|
|
|
):
|
2017-11-29 02:38:15 +00:00
|
|
|
if isinstance(url_or_dict, dict):
|
2018-05-27 08:45:03 +00:00
|
|
|
url = url_or_dict["url"]
|
|
|
|
sri = url_or_dict.get("sri")
|
|
|
|
else:
|
|
|
|
url = url_or_dict
|
|
|
|
sri = None
|
|
|
|
if url in seen_urls:
|
|
|
|
continue
|
|
|
|
seen_urls.add(url)
|
|
|
|
if sri:
|
|
|
|
yield {"url": url, "sri": sri}
|
2017-11-29 02:38:15 +00:00
|
|
|
else:
|
2018-05-27 08:45:03 +00:00
|
|
|
yield {"url": url}
|
2017-11-29 02:38:15 +00:00
|
|
|
|
|
|
|
def extra_css_urls(self):
|
2018-05-13 12:55:15 +00:00
|
|
|
return self.asset_urls("extra_css_urls")
|
2017-11-29 02:38:15 +00:00
|
|
|
|
|
|
|
def extra_js_urls(self):
|
2018-05-13 12:55:15 +00:00
|
|
|
return self.asset_urls("extra_js_urls")
|
2017-11-29 02:38:15 +00:00
|
|
|
|
2018-03-27 16:18:32 +00:00
|
|
|
def update_with_inherited_metadata(self, metadata):
|
|
|
|
# Fills in source/license with defaults, if available
|
2018-05-13 12:55:15 +00:00
|
|
|
metadata.update(
|
|
|
|
{
|
|
|
|
"source": metadata.get("source") or self.metadata.get("source"),
|
|
|
|
"source_url": metadata.get("source_url")
|
|
|
|
or self.metadata.get("source_url"),
|
|
|
|
"license": metadata.get("license") or self.metadata.get("license"),
|
|
|
|
"license_url": metadata.get("license_url")
|
|
|
|
or self.metadata.get("license_url"),
|
|
|
|
}
|
|
|
|
)
|
2018-03-27 16:18:32 +00:00
|
|
|
|
2017-11-26 22:51:42 +00:00
|
|
|
    def prepare_connection(self, conn):
        """Configure a fresh SQLite connection: row factory, text handling,
        custom functions, extensions, cache size, and plugin hooks."""
        # Rows support access by column name
        conn.row_factory = sqlite3.Row
        # Decode TEXT values as UTF-8, replacing undecodable bytes
        conn.text_factory = lambda x: str(x, "utf-8", "replace")
        for name, num_args, func in self.sqlite_functions:
            conn.create_function(name, num_args, func)
        if self.sqlite_extensions:
            conn.enable_load_extension(True)
            for extension in self.sqlite_extensions:
                # Extension paths come from trusted local configuration,
                # not user input
                conn.execute("SELECT load_extension('{}')".format(extension))
        if self.config("cache_size_kb"):
            # Negative cache_size means the value is in KB rather than pages
            conn.execute('PRAGMA cache_size=-{}'.format(self.config("cache_size_kb")))
        # Give plugins a chance to further customise the connection
        pm.hook.prepare_connection(conn=conn)
|
2017-11-26 22:51:42 +00:00
|
|
|
|
2018-06-15 06:51:23 +00:00
|
|
|
def table_exists(self, database, table):
|
|
|
|
return table in self.inspect().get(database, {}).get("tables")
|
|
|
|
|
2017-11-13 15:20:02 +00:00
|
|
|
    def inspect(self):
        " Inspect the database and return a dictionary of table metadata "
        # Cached (or constructor-supplied) result short-circuits re-inspection
        if self._inspect:
            return self._inspect

        self._inspect = {}
        for filename in self.files:
            path = Path(filename)
            # Database name is the file stem, e.g. /data/fixtures.db -> fixtures
            name = path.stem
            if name in self._inspect:
                raise Exception("Multiple files with same stem %s" % name)
            try:
                # immutable=1: open read-only, assuming the file won't change
                with sqlite3.connect(
                    "file:{}?immutable=1".format(path), uri=True
                ) as conn:
                    self.prepare_connection(conn)
                    self._inspect[name] = {
                        "hash": inspect_hash(path),
                        "file": str(path),
                        "views": inspect_views(conn),
                        "tables": inspect_tables(conn, self.metadata.get("databases", {}).get(name, {}))
                    }
            except sqlite3.OperationalError as e:
                if (e.args[0] == 'no such module: VirtualSpatialIndex'):
                    # Friendlier CLI error for SpatiaLite databases opened
                    # without the SpatiaLite extension loaded
                    raise click.UsageError(
                        "It looks like you're trying to load a SpatiaLite"
                        " database without first loading the SpatiaLite module."
                        "\n\nRead more: https://datasette.readthedocs.io/en/latest/spatialite.html"
                    )
                else:
                    raise
        return self._inspect
|
2017-11-10 19:05:57 +00:00
|
|
|
|
2018-04-14 11:27:06 +00:00
|
|
|
def register_custom_units(self):
|
|
|
|
"Register any custom units defined in the metadata.json with Pint"
|
2018-05-13 12:55:15 +00:00
|
|
|
for unit in self.metadata.get("custom_units", []):
|
2018-04-14 11:27:06 +00:00
|
|
|
ureg.define(unit)
|
|
|
|
|
2018-05-02 08:46:54 +00:00
|
|
|
def versions(self):
|
2018-05-13 12:55:15 +00:00
|
|
|
conn = sqlite3.connect(":memory:")
|
2018-05-02 08:46:54 +00:00
|
|
|
self.prepare_connection(conn)
|
2018-05-13 12:55:15 +00:00
|
|
|
sqlite_version = conn.execute("select sqlite_version()").fetchone()[0]
|
2018-05-02 08:46:54 +00:00
|
|
|
sqlite_extensions = {}
|
|
|
|
for extension, testsql, hasversion in (
|
2018-05-13 12:55:15 +00:00
|
|
|
("json1", "SELECT json('{}')", False),
|
|
|
|
("spatialite", "SELECT spatialite_version()", True),
|
2018-05-02 08:46:54 +00:00
|
|
|
):
|
|
|
|
try:
|
|
|
|
result = conn.execute(testsql)
|
|
|
|
if hasversion:
|
|
|
|
sqlite_extensions[extension] = result.fetchone()[0]
|
|
|
|
else:
|
|
|
|
sqlite_extensions[extension] = None
|
|
|
|
except Exception as e:
|
|
|
|
pass
|
2018-05-11 13:19:25 +00:00
|
|
|
# Figure out supported FTS versions
|
|
|
|
fts_versions = []
|
2018-05-13 12:55:15 +00:00
|
|
|
for fts in ("FTS5", "FTS4", "FTS3"):
|
2018-05-11 13:19:25 +00:00
|
|
|
try:
|
|
|
|
conn.execute(
|
2018-05-23 17:43:34 +00:00
|
|
|
"CREATE VIRTUAL TABLE v{fts} USING {fts} (data)".format(fts=fts)
|
2018-05-11 13:19:25 +00:00
|
|
|
)
|
|
|
|
fts_versions.append(fts)
|
|
|
|
except sqlite3.OperationalError:
|
|
|
|
continue
|
2018-06-17 20:14:55 +00:00
|
|
|
datasette_version = {"version": __version__}
|
|
|
|
if self.version_note:
|
|
|
|
datasette_version["note"] = self.version_note
|
2018-05-02 08:46:54 +00:00
|
|
|
return {
|
2018-05-13 12:55:15 +00:00
|
|
|
"python": {
|
|
|
|
"version": ".".join(map(str, sys.version_info[:3])), "full": sys.version
|
2018-05-02 08:46:54 +00:00
|
|
|
},
|
2018-06-17 20:14:55 +00:00
|
|
|
"datasette": datasette_version,
|
2018-05-13 12:55:15 +00:00
|
|
|
"sqlite": {
|
|
|
|
"version": sqlite_version,
|
|
|
|
"fts_versions": fts_versions,
|
|
|
|
"extensions": sqlite_extensions,
|
2018-05-02 08:46:54 +00:00
|
|
|
},
|
|
|
|
}
|
|
|
|
|
2018-05-13 13:06:02 +00:00
|
|
|
def plugins(self):
|
|
|
|
return [
|
|
|
|
{
|
|
|
|
"name": p["name"],
|
|
|
|
"static": p["static_path"] is not None,
|
|
|
|
"templates": p["templates_path"] is not None,
|
|
|
|
"version": p.get("version"),
|
|
|
|
}
|
|
|
|
for p in get_plugins(pm)
|
|
|
|
]
|
|
|
|
|
2018-05-25 00:15:37 +00:00
|
|
|
    async def execute(
        self,
        db_name,
        sql,
        params=None,
        truncate=False,
        custom_time_limit=None,
        page_size=None,
    ):
        """Executes sql against db_name in a thread.

        :param db_name: name of the database (stem of the .db file)
        :param sql: SQL string, with :named parameter placeholders
        :param params: dict of named parameters
        :param truncate: if True, fetch at most max_returned_rows and report
            whether the result was truncated
        :param custom_time_limit: ms; may only lower the configured limit
        :param page_size: rows per page; defaults to self.page_size
        :returns: a Results object (rows, truncated flag, cursor description)
        """
        page_size = page_size or self.page_size

        def sql_operation_in_thread():
            # Reuse this thread's cached connection for db_name, opening a
            # read-only (immutable) connection on first use.
            conn = getattr(connections, db_name, None)
            if not conn:
                info = self.inspect()[db_name]
                conn = sqlite3.connect(
                    "file:{}?immutable=1".format(info["file"]),
                    uri=True,
                    check_same_thread=False,
                )
                self.prepare_connection(conn)
                setattr(connections, db_name, conn)

            # A custom limit can only tighten, never relax, the configured one
            time_limit_ms = self.sql_time_limit_ms
            if custom_time_limit and custom_time_limit < time_limit_ms:
                time_limit_ms = custom_time_limit

            with sqlite_timelimit(conn, time_limit_ms):
                try:
                    cursor = conn.cursor()
                    cursor.execute(sql, params or {})
                    max_returned_rows = self.max_returned_rows
                    # Bump the cap by one so a full page isn't mistaken for
                    # a truncated result when the limits coincide
                    if max_returned_rows == page_size:
                        max_returned_rows += 1
                    if max_returned_rows and truncate:
                        # Fetch one extra row purely to detect truncation
                        rows = cursor.fetchmany(max_returned_rows + 1)
                        truncated = len(rows) > max_returned_rows
                        rows = rows[:max_returned_rows]
                    else:
                        rows = cursor.fetchall()
                        truncated = False
                except sqlite3.OperationalError as e:
                    # NOTE: InterruptedError here is the utils class imported
                    # above, which shadows the builtin of the same name
                    if e.args == ('interrupted',):
                        raise InterruptedError(e)
                    print(
                        "ERROR: conn={}, sql = {}, params = {}: {}".format(
                            conn, repr(sql), params, e
                        )
                    )
                    raise

            if truncate:
                return Results(rows, truncated, cursor.description)

            else:
                return Results(rows, False, cursor.description)

        # Run the blocking SQLite work on the thread pool so the event loop
        # stays responsive
        return await asyncio.get_event_loop().run_in_executor(
            self.executor, sql_operation_in_thread
        )
|
|
|
|
|
2017-11-10 19:05:57 +00:00
|
|
|
    def app(self):
        """Build and return the configured Sanic application.

        Sets up the Jinja template environment, registers all routes and
        static mounts (including plugin-provided ones), and installs the
        trailing-slash redirect middleware and the global exception handler.
        """
        app = Sanic(__name__)
        default_templates = str(app_root / "datasette" / "templates")
        # Template lookup order: user template_dir, then plugin templates,
        # then the bundled defaults
        template_paths = []
        if self.template_dir:
            template_paths.append(self.template_dir)
        template_paths.extend(
            [
                plugin["templates_path"]
                for plugin in get_plugins(pm)
                if plugin["templates_path"]
            ]
        )
        template_paths.append(default_templates)
        template_loader = ChoiceLoader(
            [
                FileSystemLoader(template_paths),
                # Support {% extends "default:table.html" %}:
                PrefixLoader(
                    {"default": FileSystemLoader(default_templates)}, delimiter=":"
                ),
            ]
        )
        self.jinja_env = Environment(loader=template_loader, autoescape=True)
        self.jinja_env.filters["escape_css_string"] = escape_css_string
        self.jinja_env.filters["quote_plus"] = lambda u: urllib.parse.quote_plus(u)
        self.jinja_env.filters["escape_sqlite"] = escape_sqlite
        self.jinja_env.filters["to_css_class"] = to_css_class
        # Let plugins extend the Jinja environment
        pm.hook.prepare_jinja2_environment(env=self.jinja_env)
        app.add_route(IndexView.as_view(self), "/<as_format:(\.jsono?)?$>")
        # TODO: /favicon.ico and /-/static/ deserve far-future cache expires
        app.add_route(favicon, "/favicon.ico")
        app.static("/-/static/", str(app_root / "datasette" / "static"))
        for path, dirname in self.static_mounts:
            app.static(path, dirname)
        # Mount any plugin static/ directories
        for plugin in get_plugins(pm):
            if plugin["static_path"]:
                modpath = "/-/static-plugins/{}/".format(plugin["name"])
                app.static(modpath, plugin["static_path"])
        # Introspection endpoints, all available as HTML or .json
        app.add_route(
            JsonDataView.as_view(self, "inspect.json", self.inspect),
            "/-/inspect<as_format:(\.json)?$>",
        )
        app.add_route(
            JsonDataView.as_view(self, "metadata.json", lambda: self.metadata),
            "/-/metadata<as_format:(\.json)?$>",
        )
        app.add_route(
            JsonDataView.as_view(self, "versions.json", self.versions),
            "/-/versions<as_format:(\.json)?$>",
        )
        app.add_route(
            JsonDataView.as_view(self, "plugins.json", self.plugins),
            "/-/plugins<as_format:(\.json)?$>",
        )
        app.add_route(
            JsonDataView.as_view(self, "config.json", lambda: self._config),
            "/-/config<as_format:(\.json)?$>",
        )
        # Database, table and row routes
        app.add_route(
            DatabaseDownload.as_view(self), "/<db_name:[^/]+?><as_db:(\.db)$>"
        )
        app.add_route(
            DatabaseView.as_view(self), "/<db_name:[^/]+?><as_format:(\.jsono?|\.csv)?$>"
        )
        app.add_route(
            TableView.as_view(self),
            "/<db_name:[^/]+>/<table_and_format:[^/]+?$>",
        )
        app.add_route(
            RowView.as_view(self),
            "/<db_name:[^/]+>/<table:[^/]+?>/<pk_path:[^/]+?><as_format:(\.jsono?)?$>",
        )
        self.register_custom_units()

        # On 404 with a trailing slash redirect to path without that slash:
        @app.middleware("response")
        def redirect_on_404_with_trailing_slash(request, original_response):
            if original_response.status == 404 and request.path.endswith("/"):
                path = request.path.rstrip("/")
                if request.query_string:
                    path = "{}?{}".format(path, request.query_string)
                return response.redirect(path)

        @app.exception(Exception)
        def on_exception(request, exception):
            """Render errors as JSON for .json requests, HTML otherwise."""
            title = None
            # NOTE(review): `help` is assigned but never used (and shadows
            # the builtin) — looks vestigial; confirm before removing
            help = None
            if isinstance(exception, NotFound):
                status = 404
                info = {}
                message = exception.args[0]
            elif isinstance(exception, InvalidUsage):
                status = 405
                info = {}
                message = exception.args[0]
            elif isinstance(exception, DatasetteError):
                status = exception.status
                info = exception.error_dict
                message = exception.message
                # (attribute name "messagge_is_html" is misspelled upstream)
                if exception.messagge_is_html:
                    message = Markup(message)
                title = exception.title
            else:
                # Unexpected errors become a 500 with the traceback logged
                status = 500
                info = {}
                message = str(exception)
                traceback.print_exc()
            # Try a status-specific template first, falling back to 500.html
            templates = ["500.html"]
            if status != 500:
                templates = ["{}.html".format(status)] + templates
            info.update(
                {"ok": False, "error": message, "status": status, "title": title}
            )
            if request is not None and request.path.split("?")[0].endswith(".json"):
                return response.json(info, status=status)

            else:
                template = self.jinja_env.select_template(templates)
                return response.html(template.render(info), status=status)

        return app
|