Mirror of https://github.com/simonw/datasette
Merge pull request #1912 from simonw/1.0-dev
Merge 1.0-dev (with initial write API) back into main
pull/1930/head
commit 07aad51176
@@ -3,7 +3,8 @@ name: Deploy latest.datasette.io
 on:
   push:
     branches:
     - main
+    - 1.0-dev

 permissions:
   contents: read
@@ -68,6 +69,8 @@ jobs:
         gcloud config set project datasette-222320
         export SUFFIX="-${GITHUB_REF#refs/heads/}"
         export SUFFIX=${SUFFIX#-main}
+        # Replace 1.0 with one-dot-zero in SUFFIX
+        export SUFFIX=${SUFFIX//1.0/one-dot-zero}
         datasette publish cloudrun fixtures.db fixtures2.db extra_database.db \
           -m fixtures.json \
           --plugins-dir=plugins \
@@ -27,19 +27,21 @@ from jinja2.environment import Template
 from jinja2.exceptions import TemplateNotFound

 from .views.base import ureg
-from .views.database import DatabaseDownload, DatabaseView
+from .views.database import DatabaseDownload, DatabaseView, TableCreateView
 from .views.index import IndexView
 from .views.special import (
     JsonDataView,
     PatternPortfolioView,
     AuthTokenView,
+    ApiExplorerView,
+    CreateTokenView,
     LogoutView,
     AllowDebugView,
     PermissionsDebugView,
     MessagesDebugView,
 )
-from .views.table import TableView
-from .views.row import RowView
+from .views.table import TableView, TableInsertView, TableDropView
+from .views.row import RowView, RowDeleteView, RowUpdateView
 from .renderer import json_renderer
 from .url_builder import Urls
 from .database import Database, QueryInterrupted
@@ -60,13 +62,19 @@ from .utils import (
     parse_metadata,
     resolve_env_secrets,
     resolve_routes,
     tilde_decode,
     to_css_class,
     urlsafe_components,
+    row_sql_params_pks,
 )
 from .utils.asgi import (
     AsgiLifespan,
     Base400,
     Forbidden,
     NotFound,
+    DatabaseNotFound,
+    TableNotFound,
+    RowNotFound,
     Request,
     Response,
     asgi_static,
@@ -98,6 +106,11 @@ SETTINGS = (
         1000,
         "Maximum rows that can be returned from a table or custom query",
     ),
+    Setting(
+        "max_insert_rows",
+        100,
+        "Maximum rows that can be inserted at a time using the bulk insert API",
+    ),
     Setting(
         "num_sql_threads",
         3,
@@ -123,6 +136,16 @@ SETTINGS = (
         True,
         "Allow users to download the original SQLite database files",
     ),
+    Setting(
+        "allow_signed_tokens",
+        True,
+        "Allow users to create and use signed API tokens",
+    ),
+    Setting(
+        "max_signed_tokens_ttl",
+        0,
+        "Maximum allowed expiry time for signed API tokens",
+    ),
     Setting("suggest_facets", True, "Calculate and display suggested facets"),
     Setting(
         "default_cache_ttl",
@@ -181,6 +204,12 @@ async def favicon(request, send):
     )


+ResolvedTable = collections.namedtuple("ResolvedTable", ("db", "table", "is_view"))
+ResolvedRow = collections.namedtuple(
+    "ResolvedRow", ("db", "table", "sql", "params", "pks", "pk_values", "row")
+)
+
+
 class Datasette:
     # Message constants:
     INFO = 1
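These two namedtuples are the return shapes for the resolve_table() and resolve_row() helpers added further down in this diff. A minimal sketch of how a view unpacks one (the field values here are placeholders, not taken from the diff):

    import collections

    ResolvedTable = collections.namedtuple("ResolvedTable", ("db", "table", "is_view"))

    # Views can unpack the result positionally:
    resolved = ResolvedTable(db=None, table="creatures", is_view=False)
    db, table, is_view = resolved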
@@ -1083,6 +1112,7 @@ class Datasette:
             ),
             "base_url": self.setting("base_url"),
             "csrftoken": request.scope["csrftoken"] if request else lambda: "",
+            "datasette_version": __version__,
         },
         **extra_template_vars,
     }
@@ -1215,6 +1245,14 @@ class Datasette:
             AuthTokenView.as_view(self),
             r"/-/auth-token$",
         )
+        add_route(
+            CreateTokenView.as_view(self),
+            r"/-/create-token$",
+        )
+        add_route(
+            ApiExplorerView.as_view(self),
+            r"/-/api$",
+        )
         add_route(
             LogoutView.as_view(self),
             r"/-/logout$",
@@ -1239,6 +1277,7 @@ class Datasette:
         add_route(
             DatabaseView.as_view(self), r"/(?P<database>[^\/\.]+)(\.(?P<format>\w+))?$"
         )
+        add_route(TableCreateView.as_view(self), r"/(?P<database>[^\/\.]+)/-/create$")
         add_route(
             TableView.as_view(self),
             r"/(?P<database>[^\/\.]+)/(?P<table>[^\/\.]+)(\.(?P<format>\w+))?$",
@@ -1247,12 +1286,63 @@ class Datasette:
             RowView.as_view(self),
             r"/(?P<database>[^\/\.]+)/(?P<table>[^/]+?)/(?P<pks>[^/]+?)(\.(?P<format>\w+))?$",
         )
+        add_route(
+            TableInsertView.as_view(self),
+            r"/(?P<database>[^\/\.]+)/(?P<table>[^\/\.]+)/-/insert$",
+        )
+        add_route(
+            TableDropView.as_view(self),
+            r"/(?P<database>[^\/\.]+)/(?P<table>[^\/\.]+)/-/drop$",
+        )
+        add_route(
+            RowDeleteView.as_view(self),
+            r"/(?P<database>[^\/\.]+)/(?P<table>[^/]+?)/(?P<pks>[^/]+?)/-/delete$",
+        )
+        add_route(
+            RowUpdateView.as_view(self),
+            r"/(?P<database>[^\/\.]+)/(?P<table>[^/]+?)/(?P<pks>[^/]+?)/-/update$",
+        )
         return [
             # Compile any strings to regular expressions
             ((re.compile(pattern) if isinstance(pattern, str) else pattern), view)
             for pattern, view in routes
         ]
+
+    async def resolve_database(self, request):
+        database_route = tilde_decode(request.url_vars["database"])
+        try:
+            return self.get_database(route=database_route)
+        except KeyError:
+            raise DatabaseNotFound(
+                "Database not found: {}".format(database_route), database_route
+            )
+
+    async def resolve_table(self, request):
+        db = await self.resolve_database(request)
+        table_name = tilde_decode(request.url_vars["table"])
+        # Table must exist
+        is_view = False
+        table_exists = await db.table_exists(table_name)
+        if not table_exists:
+            is_view = await db.view_exists(table_name)
+        if not (table_exists or is_view):
+            raise TableNotFound(
+                "Table not found: {}".format(table_name), db.name, table_name
+            )
+        return ResolvedTable(db, table_name, is_view)
+
+    async def resolve_row(self, request):
+        db, table_name, _ = await self.resolve_table(request)
+        pk_values = urlsafe_components(request.url_vars["pks"])
+        sql, params, pks = await row_sql_params_pks(db, table_name, pk_values)
+        results = await db.execute(sql, params, truncate=True)
+        row = results.first()
+        if row is None:
+            raise RowNotFound(
+                "Row not found: {}".format(pk_values), db.name, table_name, pk_values
+            )
+        return ResolvedRow(db, table_name, sql, params, pks, pk_values, results.first())

     def app(self):
         """Returns an ASGI app function that serves the whole of Datasette"""
         routes = self._routes()
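Taken together, the three resolvers turn URL variables into database/table/row objects and raise the new typed NotFound subclasses on failure. A hedged sketch of how downstream code might use them (the datasette and request objects are assumed to come from a matched route):

    from datasette.utils.asgi import TableNotFound

    async def fetch_row_or_none(datasette, request):
        # Assumes the route supplied <database>, <table> and <pks> url_vars
        try:
            resolved = await datasette.resolve_row(request)
        except TableNotFound:
            return None
        return dict(resolved.row) if resolved.row is not None else None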
@@ -338,6 +338,12 @@ class Database:
         )
         return bool(results.rows)

+    async def view_exists(self, table):
+        results = await self.execute(
+            "select 1 from sqlite_master where type='view' and name=?", params=(table,)
+        )
+        return bool(results.rows)
+
     async def table_names(self):
         results = await self.execute(
             "select name from sqlite_master where type='table'"
@@ -1,11 +1,23 @@
 from datasette import hookimpl
 from datasette.utils import actor_matches_allow
+import click
+import itsdangerous
+import json
+import time


-@hookimpl(tryfirst=True)
-def permission_allowed(datasette, actor, action, resource):
+@hookimpl(tryfirst=True, specname="permission_allowed")
+def permission_allowed_default(datasette, actor, action, resource):
     async def inner():
-        if action in ("permissions-debug", "debug-menu"):
+        if action in (
+            "permissions-debug",
+            "debug-menu",
+            "insert-row",
+            "create-table",
+            "drop-table",
+            "delete-row",
+            "update-row",
+        ):
             if actor and actor.get("id") == "root":
                 return True
         elif action == "view-instance":
@@ -45,3 +57,132 @@ def permission_allowed(datasette, actor, action, resource):
             return actor_matches_allow(actor, database_allow_sql)

     return inner
+
+
+@hookimpl(specname="permission_allowed")
+def permission_allowed_actor_restrictions(actor, action, resource):
+    if actor is None:
+        return None
+    if "_r" not in actor:
+        # No restrictions, so we have no opinion
+        return None
+    _r = actor.get("_r")
+    action_initials = "".join([word[0] for word in action.split("-")])
+    # If _r is defined then we use those to further restrict the actor
+    # Crucially, we only use this to say NO (return False) - we never
+    # use it to return YES (True) because that might over-ride other
+    # restrictions placed on this actor
+    all_allowed = _r.get("a")
+    if all_allowed is not None:
+        assert isinstance(all_allowed, list)
+        if action_initials in all_allowed:
+            return None
+    # How about for the current database?
+    if action in ("view-database", "view-database-download", "execute-sql"):
+        database_allowed = _r.get("d", {}).get(resource)
+        if database_allowed is not None:
+            assert isinstance(database_allowed, list)
+            if action_initials in database_allowed:
+                return None
+    # Or the current table? That's any time the resource is (database, table)
+    if not isinstance(resource, str) and len(resource) == 2:
+        database, table = resource
+        table_allowed = _r.get("t", {}).get(database, {}).get(table)
+        # TODO: What should this do for canned queries?
+        if table_allowed is not None:
+            assert isinstance(table_allowed, list)
+            if action_initials in table_allowed:
+                return None
+    # This action is not specifically allowed, so reject it
+    return False
+
+
+@hookimpl
+def actor_from_request(datasette, request):
+    prefix = "dstok_"
+    if not datasette.setting("allow_signed_tokens"):
+        return None
+    max_signed_tokens_ttl = datasette.setting("max_signed_tokens_ttl")
+    authorization = request.headers.get("authorization")
+    if not authorization:
+        return None
+    if not authorization.startswith("Bearer "):
+        return None
+    token = authorization[len("Bearer ") :]
+    if not token.startswith(prefix):
+        return None
+    token = token[len(prefix) :]
+    try:
+        decoded = datasette.unsign(token, namespace="token")
+    except itsdangerous.BadSignature:
+        return None
+    if "t" not in decoded:
+        # Missing timestamp
+        return None
+    created = decoded["t"]
+    if not isinstance(created, int):
+        # Invalid timestamp
+        return None
+    duration = decoded.get("d")
+    if duration is not None and not isinstance(duration, int):
+        # Invalid duration
+        return None
+    if (duration is None and max_signed_tokens_ttl) or (
+        duration is not None
+        and max_signed_tokens_ttl
+        and duration > max_signed_tokens_ttl
+    ):
+        duration = max_signed_tokens_ttl
+    if duration:
+        if time.time() - created > duration:
+            # Expired
+            return None
+    actor = {"id": decoded["a"], "token": "dstok"}
+    if duration:
+        actor["token_expires"] = created + duration
+    return actor
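For illustration, an actor restricted via "_r" might look like the dictionary below; the hook reduces each action to its initials ("insert-row" becomes "ir") before checking the lists. Example values only, not taken from the diff:

    actor = {
        "id": "alex",  # hypothetical actor ID
        "_r": {
            "a": ["vi"],  # allowed everywhere: view-instance
            "d": {"fixtures": ["vd", "es"]},  # per-database: view-database, execute-sql
            "t": {"fixtures": {"creatures": ["vt", "ir"]}},  # per-table: view-table, insert-row
        },
    }
    action = "insert-row"
    initials = "".join(word[0] for word in action.split("-"))
    assert initials == "ir"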
+
+
+@hookimpl
+def register_commands(cli):
+    from datasette.app import Datasette
+
+    @cli.command()
+    @click.argument("id")
+    @click.option(
+        "--secret",
+        help="Secret used for signing the API tokens",
+        envvar="DATASETTE_SECRET",
+        required=True,
+    )
+    @click.option(
+        "-e",
+        "--expires-after",
+        help="Token should expire after this many seconds",
+        type=int,
+    )
+    @click.option(
+        "--debug",
+        help="Show decoded token",
+        is_flag=True,
+    )
+    def create_token(id, secret, expires_after, debug):
+        "Create a signed API token for the specified actor ID"
+        ds = Datasette(secret=secret)
+        bits = {"a": id, "token": "dstok", "t": int(time.time())}
+        if expires_after:
+            bits["d"] = expires_after
+        token = ds.sign(bits, namespace="token")
+        click.echo("dstok_{}".format(token))
+        if debug:
+            click.echo("\nDecoded:\n")
+            click.echo(json.dumps(ds.unsign(token, namespace="token"), indent=2))
+
+
+@hookimpl
+def skip_csrf(scope):
+    # Skip CSRF check for requests with content-type: application/json
+    if scope["type"] == "http":
+        headers = scope.get("headers") or {}
+        if dict(headers).get(b"content-type") == b"application/json":
+            return True
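The create-token command is a thin wrapper over Datasette.sign() / Datasette.unsign(), which actor_from_request() above reverses. A sketch of the round trip (the secret is a made-up example):

    import time
    from datasette.app import Datasette

    ds = Datasette(secret="example-not-a-real-secret")
    bits = {"a": "root", "token": "dstok", "t": int(time.time()), "d": 3600}
    token = "dstok_" + ds.sign(bits, namespace="token")
    # actor_from_request() strips the "dstok_" prefix, then verifies:
    assert ds.unsign(token[len("dstok_"):], namespace="token") == bits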
@@ -0,0 +1,19 @@
+import collections
+
+Permission = collections.namedtuple(
+    "Permission", ("name", "abbr", "takes_database", "takes_table", "default")
+)
+
+PERMISSIONS = (
+    Permission("view-instance", "vi", False, False, True),
+    Permission("view-database", "vd", True, False, True),
+    Permission("view-database-download", "vdd", True, False, True),
+    Permission("view-table", "vt", True, True, True),
+    Permission("view-query", "vq", True, True, True),
+    Permission("insert-row", "ir", True, True, False),
+    Permission("delete-row", "dr", True, True, False),
+    Permission("drop-table", "dt", True, True, False),
+    Permission("execute-sql", "es", True, False, True),
+    Permission("permissions-debug", "pd", False, False, False),
+    Permission("debug-menu", "dm", False, False, False),
+)
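The abbr values follow the same initials scheme that permission_allowed_actor_restrictions() computes from the action name, which is what makes the "_r" lists compact. A quick sanity check of that correspondence:

    from datasette.permissions import PERMISSIONS

    for p in PERMISSIONS:
        # e.g. "view-database-download" -> "vdd"
        assert p.abbr == "".join(word[0] for word in p.name.split("-"))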
@@ -0,0 +1,56 @@
+/*
+https://github.com/luyilin/json-format-highlight
+From https://unpkg.com/json-format-highlight@1.0.1/dist/json-format-highlight.js
+MIT Licensed
+*/
+(function (global, factory) {
+  typeof exports === "object" && typeof module !== "undefined"
+    ? (module.exports = factory())
+    : typeof define === "function" && define.amd
+    ? define(factory)
+    : (global.jsonFormatHighlight = factory());
+})(this, function () {
+  "use strict";
+
+  var defaultColors = {
+    keyColor: "dimgray",
+    numberColor: "lightskyblue",
+    stringColor: "lightcoral",
+    trueColor: "lightseagreen",
+    falseColor: "#f66578",
+    nullColor: "cornflowerblue",
+  };
+
+  function index(json, colorOptions) {
+    if (colorOptions === void 0) colorOptions = {};
+
+    if (!json) {
+      return;
+    }
+    if (typeof json !== "string") {
+      json = JSON.stringify(json, null, 2);
+    }
+    var colors = Object.assign({}, defaultColors, colorOptions);
+    json = json.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
+    return json.replace(
+      /("(\\u[a-zA-Z0-9]{4}|\\[^u]|[^\\"])*"(\s*:)?|\b(true|false|null)\b|-?\d+(?:\.\d*)?(?:[eE][+]?\d+)?)/g,
+      function (match) {
+        var color = colors.numberColor;
+        if (/^"/.test(match)) {
+          color = /:$/.test(match) ? colors.keyColor : colors.stringColor;
+        } else {
+          color = /true/.test(match)
+            ? colors.trueColor
+            : /false/.test(match)
+            ? colors.falseColor
+            : /null/.test(match)
+            ? colors.nullColor
+            : color;
+        }
+        return '<span style="color: ' + color + '">' + match + "</span>";
+      }
+    );
+  }
+
+  return index;
+});
@@ -9,7 +9,7 @@ document.body.addEventListener('click', (ev) => {
     if (target && target.tagName == 'DETAILS') {
         detailsClickedWithin = target;
     }
-    Array.from(document.getElementsByTagName('details')).filter(
+    Array.from(document.querySelectorAll('details.details-menu')).filter(
         (details) => details.open && details != detailsClickedWithin
     ).forEach(details => details.open = false);
});
@@ -35,7 +35,7 @@ p.message-warning {

 <p>Use this tool to try out different actor and allow combinations. See <a href="https://docs.datasette.io/en/stable/authentication.html#defining-permissions-with-allow-blocks">Defining permissions with "allow" blocks</a> for documentation.</p>

-<form action="{{ urls.path('-/allow-debug') }}" method="get">
+<form action="{{ urls.path('-/allow-debug') }}" method="get" style="margin-bottom: 1em">
 <div class="two-col">
     <p><label>Allow block</label></p>
     <textarea name="allow">{{ allow_input }}</textarea>
@@ -0,0 +1,208 @@
+{% extends "base.html" %}
+
+{% block title %}API Explorer{% endblock %}
+
+{% block extra_head %}
+<script src="{{ base_url }}-/static/json-format-highlight-1.0.1.js"></script>
+{% endblock %}
+
+{% block content %}
+
+<h1>API Explorer</h1>
+
+<p>Use this tool to try out the
+  {% if datasette_version %}
+    <a href="https://docs.datasette.io/en/{{ datasette_version }}/json_api.html">Datasette API</a>.
+  {% else %}
+    Datasette API.
+  {% endif %}
+</p>
+<details open style="border: 2px solid #ccc; border-bottom: none; padding: 0.5em">
+  <summary style="cursor: pointer;">GET</summary>
+  <form method="get" id="api-explorer-get" style="margin-top: 0.7em">
+    <div>
+      <label for="path">API path:</label>
+      <input type="text" id="path" name="path" style="width: 60%">
+      <input type="submit" value="GET">
+    </div>
+  </form>
+</details>
+<details style="border: 2px solid #ccc; padding: 0.5em">
+  <summary style="cursor: pointer">POST</summary>
+  <form method="post" id="api-explorer-post" style="margin-top: 0.7em">
+    <div>
+      <label for="path">API path:</label>
+      <input type="text" id="path" name="path" style="width: 60%">
+    </div>
+    <div style="margin: 0.5em 0">
+      <label for="apiJson" style="vertical-align: top">JSON:</label>
+      <textarea id="apiJson" name="json" style="width: 60%; height: 200px; font-family: monospace; font-size: 0.8em;"></textarea>
+    </div>
+    <p><button id="json-format" type="button">Format JSON</button> <input type="submit" value="POST"></p>
+  </form>
+</details>
+
+<div id="output" style="display: none">
+  <h2>API response: HTTP <span id="response-status"></span></h2>
+  <ul class="errors message-error"></ul>
+  <pre></pre>
+</div>
+
+<script>
+document.querySelector('#json-format').addEventListener('click', (ev) => {
+  ev.preventDefault();
+  let json = document.querySelector('textarea[name="json"]').value.trim();
+  if (!json) {
+    return;
+  }
+  try {
+    const parsed = JSON.parse(json);
+    document.querySelector('textarea[name="json"]').value = JSON.stringify(parsed, null, 2);
+  } catch (e) {
+    alert("Error parsing JSON: " + e);
+  }
+});
+var postForm = document.getElementById('api-explorer-post');
+var getForm = document.getElementById('api-explorer-get');
+var output = document.getElementById('output');
+var errorList = output.querySelector('.errors');
+
+// On first load or fragment change populate forms from # in URL, if present
+if (window.location.hash) {
+  onFragmentChange();
+}
+function onFragmentChange() {
+  var hash = window.location.hash.slice(1);
+  // Treat hash as a foo=bar string and parse it:
+  var params = new URLSearchParams(hash);
+  var method = params.get('method');
+  if (method == 'GET') {
+    getForm.closest('details').open = true;
+    postForm.closest('details').open = false;
+    getForm.querySelector('input[name="path"]').value = params.get('path');
+  } else if (method == 'POST') {
+    postForm.closest('details').open = true;
+    getForm.closest('details').open = false;
+    postForm.querySelector('input[name="path"]').value = params.get('path');
+    postForm.querySelector('textarea[name="json"]').value = params.get('json');
+  }
+}
+window.addEventListener('hashchange', () => {
+  onFragmentChange();
+  // Animate scroll to top of page
+  window.scrollTo({top: 0, behavior: 'smooth'});
+});
+
+// Cause GET and POST regions to toggle each other
+var getDetails = getForm.closest('details');
+var postDetails = postForm.closest('details');
+getDetails.addEventListener('toggle', (ev) => {
+  if (getDetails.open) {
+    postDetails.open = false;
+  }
+});
+postDetails.addEventListener('toggle', (ev) => {
+  if (postDetails.open) {
+    getDetails.open = false;
+  }
+});
+
+getForm.addEventListener("submit", (ev) => {
+  ev.preventDefault();
+  var formData = new FormData(getForm);
+  // Update URL fragment hash
+  var serialized = new URLSearchParams(formData).toString() + '&method=GET';
+  window.history.pushState({}, "", location.pathname + '#' + serialized);
+  // Send the request
+  var path = formData.get('path');
+  fetch(path, {
+    method: 'GET',
+    headers: {
+      'Accept': 'application/json',
+    }
+  }).then((response) => {
+    output.style.display = 'block';
+    document.getElementById('response-status').textContent = response.status;
+    return response.json();
+  }).then((data) => {
+    output.querySelector('pre').innerHTML = jsonFormatHighlight(data);
+    errorList.style.display = 'none';
+  }).catch((error) => {
+    alert(error);
+  });
+});
+
+postForm.addEventListener("submit", (ev) => {
+  ev.preventDefault();
+  var formData = new FormData(postForm);
+  // Update URL fragment hash
+  var serialized = new URLSearchParams(formData).toString() + '&method=POST';
+  window.history.pushState({}, "", location.pathname + '#' + serialized);
+  // Send the request
+  var json = formData.get('json');
+  var path = formData.get('path');
+  // Validate JSON
+  if (!json.length) {
+    json = '{}';
+  }
+  try {
+    var data = JSON.parse(json);
+  } catch (err) {
+    alert("Invalid JSON: " + err);
+    return;
+  }
+  // POST JSON to path with content-type application/json
+  fetch(path, {
+    method: 'POST',
+    body: json,
+    headers: {
+      'Content-Type': 'application/json',
+    }
+  }).then(r => {
+    document.getElementById('response-status').textContent = r.status;
+    return r.json();
+  }).then(data => {
+    if (data.errors) {
+      errorList.style.display = 'block';
+      errorList.innerHTML = '';
+      data.errors.forEach(error => {
+        var li = document.createElement('li');
+        li.textContent = error;
+        errorList.appendChild(li);
+      });
+    } else {
+      errorList.style.display = 'none';
+    }
+    output.querySelector('pre').innerHTML = jsonFormatHighlight(data);
+    output.style.display = 'block';
+  }).catch(err => {
+    alert("Error: " + err);
+  });
+});
+</script>
+
+{% if example_links %}
+<h2>API endpoints</h2>
+<ul class="bullets">
+  {% for database in example_links %}
+    <li>Database: <strong>{{ database.name }}</strong></li>
+    <ul class="bullets">
+      {% for link in database.links %}
+        <li><a href="{{ api_path(link) }}">{{ link.path }}</a> - {{ link.label }}</li>
+      {% endfor %}
+      {% for table in database.tables %}
+        <li><strong>{{ table.name }}</strong>
+          <ul class="bullets">
+            {% for link in table.links %}
+              <li><a href="{{ api_path(link) }}">{{ link.path }}</a> - {{ link.label }}</li>
+            {% endfor %}
+          </ul>
+        </li>
+      {% endfor %}
+    </ul>
+  {% endfor %}
+</ul>
+{% endif %}
+
+{% endblock %}
@@ -19,7 +19,7 @@
 <div class="not-footer">
 <header><nav>{% block nav %}{% block crumbs %}{{ crumbs.nav(request=request) }}{% endblock %}
 {% set links = menu_links() %}{% if links or show_logout %}
-<details class="nav-menu">
+<details class="nav-menu details-menu">
     <summary><svg aria-labelledby="nav-menu-svg-title" role="img"
         fill="currentColor" stroke="currentColor" xmlns="http://www.w3.org/2000/svg"
         viewBox="0 0 16 16" width="16" height="16">
@@ -0,0 +1,83 @@
+{% extends "base.html" %}
+
+{% block title %}Create an API token{% endblock %}
+
+{% block content %}
+
+<h1>Create an API token</h1>
+
+<p>This token will allow API access with the same abilities as your current user.</p>
+
+{% if errors %}
+  {% for error in errors %}
+    <p class="message-error">{{ error }}</p>
+  {% endfor %}
+{% endif %}
+
+<form action="{{ urls.path('-/create-token') }}" method="post">
+  <div>
+    <div class="select-wrapper" style="width: unset">
+      <select name="expire_type">
+        <option value="">Token never expires</option>
+        <option value="minutes">Expires after X minutes</option>
+        <option value="hours">Expires after X hours</option>
+        <option value="days">Expires after X days</option>
+      </select>
+    </div>
+    <input type="text" name="expire_duration" style="width: 10%">
+    <input type="hidden" name="csrftoken" value="{{ csrftoken() }}">
+    <input type="submit" value="Create token">
+  </div>
+</form>
+
+{% if token %}
+  <div>
+    <h2>Your API token</h2>
+    <form>
+      <input type="text" class="copyable" style="width: 40%" value="{{ token }}">
+      <span class="copy-link-wrapper"></span>
+    </form>
+    <!--- show token in a <details> -->
+    <details style="margin-top: 1em">
+      <summary>Token details</summary>
+      <pre>{{ token_bits|tojson }}</pre>
+    </details>
+  </div>
+{% endif %}
+
+<script>
+var expireDuration = document.querySelector('input[name="expire_duration"]');
+expireDuration.style.display = 'none';
+var expireType = document.querySelector('select[name="expire_type"]');
+function showHideExpireDuration() {
+  if (expireType.value) {
+    expireDuration.style.display = 'inline';
+    expireDuration.setAttribute("placeholder", expireType.value.replace("Expires after X ", ""));
+  } else {
+    expireDuration.style.display = 'none';
+  }
+}
+showHideExpireDuration();
+expireType.addEventListener('change', showHideExpireDuration);
+var copyInput = document.querySelector(".copyable");
+if (copyInput) {
+  var wrapper = document.querySelector(".copy-link-wrapper");
+  var button = document.createElement("button");
+  button.className = "copyable-copy-button";
+  button.setAttribute("type", "button");
+  button.innerHTML = "Copy to clipboard";
+  button.onclick = (ev) => {
+    ev.preventDefault();
+    copyInput.select();
+    document.execCommand("copy");
+    button.innerHTML = "Copied!";
+    setTimeout(() => {
+      button.innerHTML = "Copy to clipboard";
+    }, 1500);
+  };
+  wrapper.appendChild(button);
+  wrapper.insertAdjacentElement("afterbegin", button);
+}
+</script>
+
+{% endblock %}
@@ -13,7 +13,7 @@
 <div class="page-header" style="border-color: #{{ database_color(database) }}">
     <h1>{{ metadata.title or database }}{% if private %} 🔒{% endif %}</h1>
     {% set links = database_actions() %}{% if links %}
-    <details class="actions-menu-links">
+    <details class="actions-menu-links details-menu">
         <summary><svg aria-labelledby="actions-menu-links-title" role="img"
             style="color: #666" xmlns="http://www.w3.org/2000/svg"
             width="28" height="28" viewBox="0 0 24 24" fill="none"
@@ -13,7 +13,7 @@
 <p class="crumbs">
     <a href="/">home</a>
 </p>
-<details class="nav-menu">
+<details class="nav-menu details-menu">
     <summary><svg aria-labelledby="nav-menu-svg-title" role="img"
         fill="currentColor" stroke="currentColor" xmlns="http://www.w3.org/2000/svg"
         viewBox="0 0 16 16" width="16" height="16">

@@ -96,7 +96,7 @@
 <section class="content">
 <div class="page-header" style="border-color: #ff0000">
     <h1>fixtures</h1>
-    <details class="actions-menu-links">
+    <details class="actions-menu-links details-menu">
        <summary><svg aria-labelledby="actions-menu-links-title" role="img"
            style="color: #666" xmlns="http://www.w3.org/2000/svg"
            width="28" height="28" viewBox="0 0 24 24" fill="none"

@@ -158,7 +158,7 @@
 <section class="content">
 <div class="page-header" style="border-color: #ff0000">
     <h1>roadside_attraction_characteristics</h1>
-    <details class="actions-menu-links">
+    <details class="actions-menu-links details-menu">
        <summary><svg aria-labelledby="actions-menu-links-title" role="img"
            style="color: #666" xmlns="http://www.w3.org/2000/svg"
            width="28" height="28" viewBox="0 0 24 24" fill="none"
@@ -19,11 +19,97 @@
 .check-action, .check-when, .check-result {
     font-size: 1.3em;
 }
+textarea {
+    height: 10em;
+    width: 95%;
+    box-sizing: border-box;
+    padding: 0.5em;
+    border: 2px dotted black;
+}
+.two-col {
+    display: inline-block;
+    width: 48%;
+}
+.two-col label {
+    width: 48%;
+}
+@media only screen and (max-width: 576px) {
+    .two-col {
+        width: 100%;
+    }
+}
 </style>
 {% endblock %}

 {% block content %}

+<h1>Permission check testing tool</h1>
+
+<p>This tool lets you simulate an actor and a permission check for that actor.</p>
+
+<form action="{{ urls.path('-/permissions') }}" id="debug-post" method="post" style="margin-bottom: 1em">
+  <input type="hidden" name="csrftoken" value="{{ csrftoken() }}">
+  <div class="two-col">
+    <p><label>Actor</label></p>
+    <textarea name="actor">{% if actor_input %}{{ actor_input }}{% else %}{"id": "root"}{% endif %}</textarea>
+  </div>
+  <div class="two-col" style="vertical-align: top">
+    <p><label for="permission" style="display:block">Permission</label>
+    <select name="permission" id="permission">
+      {% for permission in permissions %}
+        <option value="{{ permission.0 }}">{{ permission.name }} (default {{ permission.default }})</option>
+      {% endfor %}
+    </select>
+    <p><label for="resource_1">Database name</label><input type="text" id="resource_1" name="resource_1"></p>
+    <p><label for="resource_2">Table or query name</label><input type="text" id="resource_2" name="resource_2"></p>
+  </div>
+  <div style="margin-top: 1em;">
+    <input type="submit" value="Simulate permission check">
+  </div>
+  <pre style="margin-top: 1em" id="debugResult"></pre>
+</form>
+
+<script>
+var rawPerms = {{ permissions|tojson }};
+var permissions = Object.fromEntries(rawPerms.map(([label, abbr, needs_resource_1, needs_resource_2, def]) => [label, {needs_resource_1, needs_resource_2, def}]))
+var permissionSelect = document.getElementById('permission');
+var resource1 = document.getElementById('resource_1');
+var resource2 = document.getElementById('resource_2');
+function updateResourceVisibility() {
+  var permission = permissionSelect.value;
+  var {needs_resource_1, needs_resource_2} = permissions[permission];
+  if (needs_resource_1) {
+    resource1.closest('p').style.display = 'block';
+  } else {
+    resource1.closest('p').style.display = 'none';
+  }
+  if (needs_resource_2) {
+    resource2.closest('p').style.display = 'block';
+  } else {
+    resource2.closest('p').style.display = 'none';
+  }
+}
+permissionSelect.addEventListener('change', updateResourceVisibility);
+updateResourceVisibility();
+
+// When #debug-post form is submitted, use fetch() to POST data
+var debugPost = document.getElementById('debug-post');
+var debugResult = document.getElementById('debugResult');
+debugPost.addEventListener('submit', function(ev) {
+  ev.preventDefault();
+  var formData = new FormData(debugPost);
+  console.log(formData);
+  fetch(debugPost.action, {
+    method: 'POST',
+    body: new URLSearchParams(formData),
+  }).then(function(response) {
+    return response.json();
+  }).then(function(data) {
+    debugResult.innerText = JSON.stringify(data, null, 4);
+  });
+});
+</script>
+
 <h1>Recent permissions checks</h1>

 {% for check in permission_checks %}
@@ -24,7 +24,7 @@
 <div class="page-header" style="border-color: #{{ database_color(database) }}">
     <h1>{{ metadata.title or table }}{% if is_view %} (view){% endif %}{% if private %} 🔒{% endif %}</h1>
     {% set links = table_actions() %}{% if links %}
-    <details class="actions-menu-links">
+    <details class="actions-menu-links details-menu">
         <summary><svg aria-labelledby="actions-menu-links-title" role="img"
             style="color: #666" xmlns="http://www.w3.org/2000/svg"
             width="28" height="28" viewBox="0 0 24 24" fill="none"
@@ -1193,3 +1193,18 @@ def truncate_url(url, length):
         rest, ext = bits
         return rest[: length - 1 - len(ext)] + "…." + ext
     return url[: length - 1] + "…"
+
+
+async def row_sql_params_pks(db, table, pk_values):
+    pks = await db.primary_keys(table)
+    use_rowid = not pks
+    select = "*"
+    if use_rowid:
+        select = "rowid, *"
+        pks = ["rowid"]
+    wheres = [f'"{pk}"=:p{i}' for i, pk in enumerate(pks)]
+    sql = f"select {select} from {escape_sqlite(table)} where {' AND '.join(wheres)}"
+    params = {}
+    for i, pk_value in enumerate(pk_values):
+        params[f"p{i}"] = pk_value
+    return sql, params, pks
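For a table with a compound primary key the helper produces parameterized SQL; a standalone sketch of the string-shaping part (table and column names invented for illustration):

    pks = ["database_name", "name"]
    pk_values = ["fixtures", "neighborhood_search"]
    wheres = [f'"{pk}"=:p{i}' for i, pk in enumerate(pks)]
    sql = f"select * from [compound] where {' AND '.join(wheres)}"
    params = {f"p{i}": value for i, value in enumerate(pk_values)}
    # sql == 'select * from [compound] where "database_name"=:p0 AND "name"=:p1'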
@@ -21,6 +21,27 @@ class NotFound(Base400):
     status = 404


+class DatabaseNotFound(NotFound):
+    def __init__(self, message, database_name):
+        super().__init__(message)
+        self.database_name = database_name
+
+
+class TableNotFound(NotFound):
+    def __init__(self, message, database_name, table):
+        super().__init__(message)
+        self.database_name = database_name
+        self.table = table
+
+
+class RowNotFound(NotFound):
+    def __init__(self, message, database_name, table, pk_values):
+        super().__init__(message)
+        self.database_name = database_name
+        self.table_name = table
+        self.pk_values = pk_values
+
+
 class Forbidden(Base400):
     status = 403
@@ -62,6 +62,7 @@ class TestClient:
         method="GET",
         cookies=None,
         if_none_match=None,
+        headers=None,
     ):
         return await self._request(
             path=path,

@@ -70,6 +71,7 @@ class TestClient:
             method=method,
             cookies=cookies,
             if_none_match=if_none_match,
+            headers=headers,
         )

     @async_to_sync
@@ -1,2 +1,2 @@
-__version__ = "0.63.2"
+__version__ = "1.0a0"
 __version_info__ = tuple(__version__.split("."))
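Worth noting: with an alpha version string the dot-split no longer yields purely numeric components:

    __version__ = "1.0a0"
    __version_info__ = tuple(__version__.split("."))
    assert __version_info__ == ("1", "0a0")  # the alpha suffix rides along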
@@ -20,7 +20,6 @@ from datasette.utils import (
     InvalidSql,
     LimitedWriter,
     call_with_supported_arguments,
     tilde_decode,
     path_from_row_pks,
     path_with_added_args,
     path_with_removed_args,
@@ -69,20 +68,31 @@ class BaseView:
     def database_color(self, database):
         return "ff0000"

-    async def options(self, request, *args, **kwargs):
+    async def method_not_allowed(self, request):
+        print(request.headers)
+        if (
+            request.path.endswith(".json")
+            or request.headers.get("content-type") == "application/json"
+        ):
+            return Response.json(
+                {"ok": False, "error": "Method not allowed"}, status=405
+            )
         return Response.text("Method not allowed", status=405)

+    async def options(self, request, *args, **kwargs):
+        return await self.method_not_allowed(request)
+
     async def post(self, request, *args, **kwargs):
-        return Response.text("Method not allowed", status=405)
+        return await self.method_not_allowed(request)

     async def put(self, request, *args, **kwargs):
-        return Response.text("Method not allowed", status=405)
+        return await self.method_not_allowed(request)

     async def patch(self, request, *args, **kwargs):
-        return Response.text("Method not allowed", status=405)
+        return await self.method_not_allowed(request)

     async def delete(self, request, *args, **kwargs):
-        return Response.text("Method not allowed", status=405)
+        return await self.method_not_allowed(request)

     async def dispatch_request(self, request):
         if self.ds:
@@ -335,13 +345,9 @@ class DataView(BaseView):
         return AsgiStream(stream_fn, headers=headers, content_type=content_type)

     async def get(self, request):
-        database_route = tilde_decode(request.url_vars["database"])
-
-        try:
-            db = self.ds.get_database(route=database_route)
-        except KeyError:
-            raise NotFound("Database not found: {}".format(database_route))
+        db = await self.ds.resolve_database(request)
         database = db.name
+        database_route = db.route

         _format = request.url_vars["format"]
         data_kwargs = {}
@@ -507,7 +513,6 @@ class DataView(BaseView):
                 if key not in ("_labels", "_facet", "_size")
             ]
             + [("_size", "max")],
-            "datasette_version": __version__,
             "settings": self.ds.settings_dict(),
         },
     }
@@ -536,3 +541,7 @@ class DataView(BaseView):
         if self.ds.cors:
             add_cors_headers(response.headers)
         return response
+
+
+def _error(messages, status=400):
+    return Response.json({"ok": False, "errors": messages}, status=status)
@@ -4,6 +4,8 @@ import itertools
 import json
 from markupsafe import Markup, escape
 from urllib.parse import parse_qsl, urlencode
 import re
+import sqlite_utils

 import markupsafe
@@ -26,18 +28,14 @@ from datasette.utils import (
 from datasette.utils.asgi import AsgiFileDownload, NotFound, Response, Forbidden
 from datasette.plugins import pm

-from .base import DatasetteError, DataView
+from .base import BaseView, DatasetteError, DataView, _error


 class DatabaseView(DataView):
     name = "database"

     async def data(self, request, default_labels=False, _size=None):
-        database_route = tilde_decode(request.url_vars["database"])
-        try:
-            db = self.ds.get_database(route=database_route)
-        except KeyError:
-            raise NotFound("Database not found: {}".format(database_route))
+        db = await self.ds.resolve_database(request)
         database = db.name

         visible, private = await self.ds.check_visibility(
@@ -226,11 +224,7 @@ class QueryView(DataView):
         named_parameters=None,
         write=False,
     ):
-        database_route = tilde_decode(request.url_vars["database"])
-        try:
-            db = self.ds.get_database(route=database_route)
-        except KeyError:
-            raise NotFound("Database not found: {}".format(database_route))
+        db = await self.ds.resolve_database(request)
         database = db.name
         params = {key: request.args.get(key) for key in request.args}
         if "sql" in params:
@@ -563,6 +557,266 @@ class MagicParameters(dict):
         return super().__getitem__(key)


+class TableCreateView(BaseView):
+    name = "table-create"
+
+    _valid_keys = {"table", "rows", "row", "columns", "pk", "pks"}
+    _supported_column_types = {
+        "text",
+        "integer",
+        "float",
+        "blob",
+    }
+    # Any string that does not contain a newline or start with sqlite_
+    _table_name_re = re.compile(r"^(?!sqlite_)[^\n]+$")
+
+    def __init__(self, datasette):
+        self.ds = datasette
+
+    async def post(self, request):
+        db = await self.ds.resolve_database(request)
+        database_name = db.name
+
+        # Must have create-table permission
+        if not await self.ds.permission_allowed(
+            request.actor, "create-table", resource=database_name
+        ):
+            return _error(["Permission denied"], 403)
+
+        body = await request.post_body()
+        try:
+            data = json.loads(body)
+        except json.JSONDecodeError as e:
+            return _error(["Invalid JSON: {}".format(e)])
+
+        if not isinstance(data, dict):
+            return _error(["JSON must be an object"])
+
+        invalid_keys = set(data.keys()) - self._valid_keys
+        if invalid_keys:
+            return _error(["Invalid keys: {}".format(", ".join(invalid_keys))])
+
+        table_name = data.get("table")
+        if not table_name:
+            return _error(["Table is required"])
+
+        if not self._table_name_re.match(table_name):
+            return _error(["Invalid table name"])
+
+        columns = data.get("columns")
+        rows = data.get("rows")
+        row = data.get("row")
+        if not columns and not rows and not row:
+            return _error(["columns, rows or row is required"])
+
+        if rows and row:
+            return _error(["Cannot specify both rows and row"])
+
+        if columns:
+            if rows or row:
+                return _error(["Cannot specify columns with rows or row"])
+            if not isinstance(columns, list):
+                return _error(["columns must be a list"])
+            for column in columns:
+                if not isinstance(column, dict):
+                    return _error(["columns must be a list of objects"])
+                if not column.get("name") or not isinstance(column.get("name"), str):
+                    return _error(["Column name is required"])
+                if not column.get("type"):
+                    column["type"] = "text"
+                if column["type"] not in self._supported_column_types:
+                    return _error(
+                        ["Unsupported column type: {}".format(column["type"])]
+                    )
+            # No duplicate column names
+            dupes = {c["name"] for c in columns if columns.count(c) > 1}
+            if dupes:
+                return _error(["Duplicate column name: {}".format(", ".join(dupes))])
+
+        if row:
+            rows = [row]
+
+        if rows:
+            if not isinstance(rows, list):
+                return _error(["rows must be a list"])
+            for row in rows:
+                if not isinstance(row, dict):
+                    return _error(["rows must be a list of objects"])
+
+        pk = data.get("pk")
+        pks = data.get("pks")
+
+        if pk and pks:
+            return _error(["Cannot specify both pk and pks"])
+        if pk:
+            if not isinstance(pk, str):
+                return _error(["pk must be a string"])
+        if pks:
+            if not isinstance(pks, list):
+                return _error(["pks must be a list"])
+            for pk in pks:
+                if not isinstance(pk, str):
+                    return _error(["pks must be a list of strings"])
+
+        def create_table(conn):
+            table = sqlite_utils.Database(conn)[table_name]
+            if rows:
+                table.insert_all(rows, pk=pks or pk)
+            else:
+                table.create(
+                    {c["name"]: c["type"] for c in columns},
+                    pk=pks or pk,
+                )
+            return table.schema
+
+        try:
+            schema = await db.execute_write_fn(create_table)
+        except Exception as e:
+            return _error([str(e)])
+        table_url = self.ds.absolute_url(
+            request, self.ds.urls.table(db.name, table_name)
+        )
+        table_api_url = self.ds.absolute_url(
+            request, self.ds.urls.table(db.name, table_name, format="json")
+        )
+        details = {
+            "ok": True,
+            "database": db.name,
+            "table": table_name,
+            "table_url": table_url,
+            "table_api_url": table_api_url,
+            "schema": schema,
+        }
+        if rows:
+            details["row_count"] = len(rows)
+        return Response.json(details, status=201)
+
+
 async def _table_columns(datasette, database_name):
     internal = datasette.get_database("_internal")
     result = await internal.execute(
@@ -572,4 +826,8 @@ async def _table_columns(datasette, database_name):
     table_columns = {}
     for row in result.rows:
         table_columns.setdefault(row["table_name"], []).append(row["name"])
+    # Add views
+    db = datasette.get_database(database_name)
+    for view_name in await db.view_names():
+        table_columns[view_name] = []
     return table_columns
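A request body accepted by the new /<database>/-/create endpoint would look something like this; database, table and column names are invented, and the token placeholder must be filled in:

    import json
    import urllib.request

    payload = {
        "table": "creatures",
        "columns": [
            {"name": "id", "type": "integer"},
            {"name": "name", "type": "text"},
        ],
        "pk": "id",
    }
    req = urllib.request.Request(
        "http://localhost:8001/mydatabase/-/create",
        data=json.dumps(payload).encode(),
        headers={
            "Content-Type": "application/json",
            "Authorization": "Bearer dstok_...",  # placeholder token
        },
    )
    # urllib.request.urlopen(req) returns the 201 "details" JSON built above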
@@ -1,26 +1,26 @@
-from datasette.utils.asgi import NotFound, Forbidden
+from datasette.utils.asgi import NotFound, Forbidden, Response
 from datasette.database import QueryInterrupted
-from .base import DataView
+from .base import DataView, BaseView, _error
 from datasette.utils import (
     tilde_decode,
     urlsafe_components,
     to_css_class,
     escape_sqlite,
+    row_sql_params_pks,
 )
-from .table import _sql_params_pks, display_columns_and_rows
+import json
+import sqlite_utils
+from .table import display_columns_and_rows


 class RowView(DataView):
     name = "row"

     async def data(self, request, default_labels=False):
-        database_route = tilde_decode(request.url_vars["database"])
-        table = tilde_decode(request.url_vars["table"])
-        try:
-            db = self.ds.get_database(route=database_route)
-        except KeyError:
-            raise NotFound("Database not found: {}".format(database_route))
-        database = db.name
+        resolved = await self.ds.resolve_row(request)
+        database = resolved.db.name
+        table = resolved.table
+        pk_values = resolved.pk_values

         # Ensure user has permission to view this row
         visible, private = await self.ds.check_visibility(
@@ -34,14 +34,9 @@ class RowView(DataView):
         if not visible:
             raise Forbidden("You do not have permission to view this table")

-        pk_values = urlsafe_components(request.url_vars["pks"])
-        try:
-            db = self.ds.get_database(route=database_route)
-        except KeyError:
-            raise NotFound("Database not found: {}".format(database_route))
-        database = db.name
-        sql, params, pks = await _sql_params_pks(db, table, pk_values)
-        results = await db.execute(sql, params, truncate=True)
+        results = await resolved.db.execute(
+            resolved.sql, resolved.params, truncate=True
+        )
         columns = [r[0] for r in results.description]
         rows = list(results.rows)
         if not rows:
@@ -82,7 +77,7 @@ class RowView(DataView):
             "table": table,
             "rows": rows,
             "columns": columns,
-            "primary_keys": pks,
+            "primary_keys": resolved.pks,
             "primary_key_values": pk_values,
             "units": self.ds.table_metadata(database, table).get("units", {}),
         }
@@ -146,3 +141,100 @@ class RowView(DataView):
         )
         foreign_key_tables.append({**fk, **{"count": count, "link": link}})
         return foreign_key_tables
+
+
+class RowError(Exception):
+    def __init__(self, error):
+        self.error = error
+
+
+async def _resolve_row_and_check_permission(datasette, request, permission):
+    from datasette.app import DatabaseNotFound, TableNotFound, RowNotFound
+
+    try:
+        resolved = await datasette.resolve_row(request)
+    except DatabaseNotFound as e:
+        return False, _error(["Database not found: {}".format(e.database_name)], 404)
+    except TableNotFound as e:
+        return False, _error(["Table not found: {}".format(e.table)], 404)
+    except RowNotFound as e:
+        return False, _error(["Record not found: {}".format(e.pk_values)], 404)
+
+    # Ensure user has permission to delete this row
+    if not await datasette.permission_allowed(
+        request.actor, permission, resource=(resolved.db.name, resolved.table)
+    ):
+        return False, _error(["Permission denied"], 403)
+
+    return True, resolved
+
+
+class RowDeleteView(BaseView):
+    name = "row-delete"
+
+    def __init__(self, datasette):
+        self.ds = datasette
+
+    async def post(self, request):
+        ok, resolved = await _resolve_row_and_check_permission(
+            self.ds, request, "delete-row"
+        )
+        if not ok:
+            return resolved
+
+        # Delete row
+        def delete_row(conn):
+            sqlite_utils.Database(conn)[resolved.table].delete(resolved.pk_values)
+
+        try:
+            await resolved.db.execute_write_fn(delete_row)
+        except Exception as e:
+            return _error([str(e)], 500)
+
+        return Response.json({"ok": True}, status=200)
+
+
+class RowUpdateView(BaseView):
+    name = "row-update"
+
+    def __init__(self, datasette):
+        self.ds = datasette
+
+    async def post(self, request):
+        ok, resolved = await _resolve_row_and_check_permission(
+            self.ds, request, "update-row"
+        )
+        if not ok:
+            return resolved
+
+        body = await request.post_body()
+        try:
+            data = json.loads(body)
+        except json.JSONDecodeError as e:
+            return _error(["Invalid JSON: {}".format(e)])
+
+        if not isinstance(data, dict):
+            return _error(["JSON must be a dictionary"])
+        if not "update" in data or not isinstance(data["update"], dict):
+            return _error(["JSON must contain an update dictionary"])
+
+        update = data["update"]
+
+        def update_row(conn):
+            sqlite_utils.Database(conn)[resolved.table].update(
+                resolved.pk_values, update
+            )
+
+        try:
+            await resolved.db.execute_write_fn(update_row)
+        except Exception as e:
+            return _error([str(e)], 400)
+
+        result = {"ok": True}
+        if data.get("return"):
+            results = await resolved.db.execute(
+                resolved.sql, resolved.params, truncate=True
+            )
+            rows = list(results.rows)
+            result["row"] = dict(rows[0])
+        return Response.json(result, status=200)
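And a matching sketch for the row update endpoint; again the URL, column and token values are invented for illustration:

    import json
    import urllib.request

    payload = {"update": {"name": "Renamed"}, "return": True}
    req = urllib.request.Request(
        "http://localhost:8001/mydatabase/creatures/5/-/update",
        data=json.dumps(payload).encode(),
        headers={
            "Content-Type": "application/json",
            "Authorization": "Bearer dstok_...",  # placeholder token
        },
    )
    # On success the response body is {"ok": true} plus the updated "row"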
@@ -1,8 +1,12 @@
 import json
 from datasette.utils.asgi import Response, Forbidden
 from datasette.utils import actor_matches_allow, add_cors_headers
+from datasette.permissions import PERMISSIONS
 from .base import BaseView
+import secrets
 import time
+import urllib


 class JsonDataView(BaseView):
@@ -102,7 +106,39 @@ class PermissionsDebugView(BaseView):
             ["permissions_debug.html"],
             request,
             # list() avoids error if check is performed during template render:
-            {"permission_checks": list(reversed(self.ds._permission_checks))},
+            {
+                "permission_checks": list(reversed(self.ds._permission_checks)),
+                "permissions": PERMISSIONS,
+            },
         )

+    async def post(self, request):
+        await self.ds.ensure_permissions(request.actor, ["view-instance"])
+        if not await self.ds.permission_allowed(request.actor, "permissions-debug"):
+            raise Forbidden("Permission denied")
+        vars = await request.post_vars()
+        actor = json.loads(vars["actor"])
+        permission = vars["permission"]
+        resource_1 = vars["resource_1"]
+        resource_2 = vars["resource_2"]
+        resource = []
+        if resource_1:
+            resource.append(resource_1)
+        if resource_2:
+            resource.append(resource_2)
+        resource = tuple(resource)
+        if len(resource) == 1:
+            resource = resource[0]
+        result = await self.ds.permission_allowed(
+            actor, permission, resource, default="USE_DEFAULT"
+        )
+        return Response.json(
+            {
+                "actor": actor,
+                "permission": permission,
+                "resource": resource,
+                "result": result,
+            }
+        )
@@ -163,3 +199,197 @@ class MessagesDebugView(BaseView):
        else:
            datasette.add_message(request, message, getattr(datasette, message_type))
        return Response.redirect(self.ds.urls.instance())


class CreateTokenView(BaseView):
    name = "create_token"
    has_json_alternate = False

    def check_permission(self, request):
        if not self.ds.setting("allow_signed_tokens"):
            raise Forbidden("Signed tokens are not enabled for this Datasette instance")
        if not request.actor:
            raise Forbidden("You must be logged in to create a token")
        if not request.actor.get("id"):
            raise Forbidden(
                "You must be logged in as an actor with an ID to create a token"
            )
        if request.actor.get("token"):
            raise Forbidden(
                "Token authentication cannot be used to create additional tokens"
            )

    async def get(self, request):
        self.check_permission(request)
        return await self.render(
            ["create_token.html"],
            request,
            {"actor": request.actor},
        )

    async def post(self, request):
        self.check_permission(request)
        post = await request.post_vars()
        errors = []
        duration = None
        if post.get("expire_type"):
            duration_string = post.get("expire_duration")
            if (
                not duration_string
                or not duration_string.isdigit()
                or not int(duration_string) > 0
            ):
                errors.append("Invalid expire duration")
            else:
                unit = post["expire_type"]
                if unit == "minutes":
                    duration = int(duration_string) * 60
                elif unit == "hours":
                    duration = int(duration_string) * 60 * 60
                elif unit == "days":
                    duration = int(duration_string) * 60 * 60 * 24
                else:
                    errors.append("Invalid expire duration unit")
        token_bits = None
        token = None
        if not errors:
            token_bits = {
                "a": request.actor["id"],
                "t": int(time.time()),
            }
            if duration:
                token_bits["d"] = duration
            token = "dstok_{}".format(self.ds.sign(token_bits, "token"))
        return await self.render(
            ["create_token.html"],
            request,
            {
                "actor": request.actor,
                "errors": errors,
                "token": token,
                "token_bits": token_bits,
            },
        )


class ApiExplorerView(BaseView):
    name = "api_explorer"
    has_json_alternate = False

    async def example_links(self, request):
        databases = []
        for name, db in self.ds.databases.items():
            if name == "_internal":
                continue
            database_visible, _ = await self.ds.check_visibility(
                request.actor,
                "view-database",
                name,
            )
            if not database_visible:
                continue
            tables = []
            table_names = await db.table_names()
            for table in table_names:
                visible, _ = await self.ds.check_visibility(
                    request.actor,
                    "view-table",
                    (name, table),
                )
                if not visible:
                    continue
                table_links = []
                tables.append({"name": table, "links": table_links})
                table_links.append(
                    {
                        "label": "Get rows for {}".format(table),
                        "method": "GET",
                        "path": self.ds.urls.table(name, table, format="json")
                        + "?_shape=objects",
                    }
                )
                # If not mutable don't show any write APIs
                if not db.is_mutable:
                    continue

                if await self.ds.permission_allowed(
                    request.actor, "insert-row", (name, table)
                ):
                    pks = await db.primary_keys(table)
                    table_links.append(
                        {
                            "path": self.ds.urls.table(name, table) + "/-/insert",
                            "method": "POST",
                            "label": "Insert rows into {}".format(table),
                            "json": {
                                "rows": [
                                    {
                                        column: None
                                        for column in await db.table_columns(table)
                                        if column not in pks
                                    }
                                ]
                            },
                        }
                    )
                if await self.ds.permission_allowed(
                    request.actor, "drop-table", (name, table)
                ):
                    table_links.append(
                        {
                            "path": self.ds.urls.table(name, table) + "/-/drop",
                            "label": "Drop table {}".format(table),
                            "json": {"confirm": False},
                            "method": "POST",
                        }
                    )
            database_links = []
            if (
                await self.ds.permission_allowed(request.actor, "create-table", name)
                and db.is_mutable
            ):
                database_links.append(
                    {
                        "path": self.ds.urls.database(name) + "/-/create",
                        "label": "Create table in {}".format(name),
                        "json": {
                            "table": "new_table",
                            "columns": [
                                {"name": "id", "type": "integer"},
                                {"name": "name", "type": "text"},
                            ],
                            "pk": "id",
                        },
                        "method": "POST",
                    }
                )
            if database_links or tables:
                databases.append(
                    {
                        "name": name,
                        "links": database_links,
                        "tables": tables,
                    }
                )
        return databases

    async def get(self, request):
        def api_path(link):
            return "/-/api#{}".format(
                urllib.parse.urlencode(
                    {
                        key: json.dumps(value, indent=2) if key == "json" else value
                        for key, value in link.items()
                        if key in ("path", "method", "json")
                    }
                )
            )

        return await self.render(
            ["api_explorer.html"],
            request,
            {
                "example_links": await self.example_links(request),
                "api_path": api_path,
            },
        )
@@ -28,9 +28,10 @@ from datasette.utils import (
    urlsafe_components,
    value_as_boolean,
)
from datasette.utils.asgi import BadRequest, Forbidden, NotFound, Response
from datasette.filters import Filters
import sqlite_utils
from .base import BaseView, DataView, DatasetteError, ureg, _error
from .database import QueryView

LINK_WITH_LABEL = (
@@ -92,26 +93,79 @@ class TableView(DataView):
        return expandables

    async def post(self, request):
        from datasette.app import TableNotFound

        try:
            resolved = await self.ds.resolve_table(request)
        except TableNotFound as e:
            # Was this actually a canned query?
            canned_query = await self.ds.get_canned_query(
                e.database_name, e.table, request.actor
            )
            if canned_query:
                # Handle POST to a canned query
                return await QueryView(self.ds).data(
                    request,
                    canned_query["sql"],
                    metadata=canned_query,
                    editable=False,
                    canned_query=e.table,
                    named_parameters=canned_query.get("params"),
                    write=bool(canned_query.get("write")),
                )
            raise

        # Handle POST to a table
        return await self.table_post(
            request, resolved.db, resolved.db.name, resolved.table
        )

    async def table_post(self, request, db, database_name, table_name):
        # Must have insert-row permission
        if not await self.ds.permission_allowed(
            request.actor, "insert-row", resource=(database_name, table_name)
        ):
            raise Forbidden("Permission denied")
        if request.headers.get("content-type") != "application/json":
            # TODO: handle form-encoded data
            raise BadRequest("Must send JSON data")
        data = json.loads(await request.post_body())
        if "insert" not in data:
            raise BadRequest('Must send an "insert" key containing a dictionary')
        row = data["insert"]
        if not isinstance(row, dict):
            raise BadRequest("insert must be a dictionary")
        # Verify all columns exist
        columns = await db.table_columns(table_name)
        pks = await db.primary_keys(table_name)
        for key in row:
            if key not in columns:
                raise BadRequest("Column not found: {}".format(key))
            if key in pks:
                raise BadRequest(
                    "Cannot insert into primary key column: {}".format(key)
                )
        # Perform the insert
        sql = "INSERT INTO [{table}] ({columns}) VALUES ({values})".format(
            table=escape_sqlite(table_name),
            columns=", ".join(escape_sqlite(c) for c in row),
            values=", ".join("?" for c in row),
        )
        cursor = await db.execute_write(sql, list(row.values()))
        # Return the new row
        rowid = cursor.lastrowid
        new_row = (
            await db.execute(
                "SELECT * FROM [{table}] WHERE rowid = ?".format(
                    table=escape_sqlite(table_name)
                ),
                [rowid],
            )
        ).first()
        return Response.json(
            {
                "inserted_row": dict(new_row),
            },
            status=201,
        )

    async def columns_to_select(self, table_columns, pks, request):
@@ -164,12 +218,31 @@ class TableView(DataView):
        _next=None,
        _size=None,
    ):
        from datasette.app import TableNotFound

        try:
            resolved = await self.ds.resolve_table(request)
        except TableNotFound as e:
            # Was this actually a canned query?
            canned_query = await self.ds.get_canned_query(
                e.database_name, e.table, request.actor
            )
            # If this is a canned query, not a table, then dispatch to QueryView instead
            if canned_query:
                return await QueryView(self.ds).data(
                    request,
                    canned_query["sql"],
                    metadata=canned_query,
                    editable=False,
                    canned_query=e.table,
                    named_parameters=canned_query.get("params"),
                    write=bool(canned_query.get("write")),
                )
            else:
                raise

        table_name = resolved.table
        db = resolved.db
        database_name = db.name

        # For performance profiling purposes, ?_noparallel=1 turns off asyncio.gather
@@ -186,21 +259,6 @@ class TableView(DataView):
            _gather_sequential if request.args.get("_noparallel") else _gather_parallel
        )

        is_view, table_exists = map(
            bool,
            await gather(
@@ -817,21 +875,6 @@ class TableView(DataView):
        )


async def display_columns_and_rows(
    datasette,
    database_name,
@@ -1023,3 +1066,188 @@ async def display_columns_and_rows(
    }
    columns = [first_column] + columns
    return columns, cell_rows


class TableInsertView(BaseView):
    name = "table-insert"

    def __init__(self, datasette):
        self.ds = datasette

    async def _validate_data(self, request, db, table_name):
        errors = []

        def _errors(errors):
            return None, errors, {}

        if request.headers.get("content-type") != "application/json":
            # TODO: handle form-encoded data
            return _errors(["Invalid content-type, must be application/json"])
        body = await request.post_body()
        try:
            data = json.loads(body)
        except json.JSONDecodeError as e:
            return _errors(["Invalid JSON: {}".format(e)])
        if not isinstance(data, dict):
            return _errors(["JSON must be a dictionary"])
        keys = data.keys()

        # keys must contain "row" or "rows"
        if "row" not in keys and "rows" not in keys:
            return _errors(['JSON must have one or other of "row" or "rows"'])
        rows = []
        if "row" in keys:
            if "rows" in keys:
                return _errors(['Cannot use "row" and "rows" at the same time'])
            row = data["row"]
            if not isinstance(row, dict):
                return _errors(['"row" must be a dictionary'])
            rows = [row]
            data["return"] = True
        else:
            rows = data["rows"]
        if not isinstance(rows, list):
            return _errors(['"rows" must be a list'])
        for row in rows:
            if not isinstance(row, dict):
                return _errors(['"rows" must be a list of dictionaries'])

        # Does this exceed max_insert_rows?
        max_insert_rows = self.ds.setting("max_insert_rows")
        if len(rows) > max_insert_rows:
            return _errors(
                ["Too many rows, maximum allowed is {}".format(max_insert_rows)]
            )

        # Validate other parameters
        extras = {
            key: value for key, value in data.items() if key not in ("row", "rows")
        }
        valid_extras = {"return", "ignore", "replace"}
        invalid_extras = extras.keys() - valid_extras
        if invalid_extras:
            return _errors(
                ['Invalid parameter: "{}"'.format('", "'.join(sorted(invalid_extras)))]
            )
        if extras.get("ignore") and extras.get("replace"):
            return _errors(['Cannot use "ignore" and "replace" at the same time'])

        # Validate columns of each row
        columns = set(await db.table_columns(table_name))
        for i, row in enumerate(rows):
            invalid_columns = set(row.keys()) - columns
            if invalid_columns:
                errors.append(
                    "Row {} has invalid columns: {}".format(
                        i, ", ".join(sorted(invalid_columns))
                    )
                )
        if errors:
            return _errors(errors)
        return rows, errors, extras

    async def post(self, request):
        try:
            resolved = await self.ds.resolve_table(request)
        except NotFound as e:
            return _error([e.args[0]], 404)
        db = resolved.db
        database_name = db.name
        table_name = resolved.table

        # Table must exist (may handle table creation in the future)
        db = self.ds.get_database(database_name)
        if not await db.table_exists(table_name):
            return _error(["Table not found: {}".format(table_name)], 404)
        # Must have insert-row permission
        if not await self.ds.permission_allowed(
            request.actor, "insert-row", resource=(database_name, table_name)
        ):
            return _error(["Permission denied"], 403)
        rows, errors, extras = await self._validate_data(request, db, table_name)
        if errors:
            return _error(errors, 400)

        ignore = extras.get("ignore")
        replace = extras.get("replace")

        should_return = bool(extras.get("return", False))

        # Insert rows
        def insert_rows(conn):
            table = sqlite_utils.Database(conn)[table_name]
            if should_return:
                rowids = []
                for row in rows:
                    rowids.append(
                        table.insert(row, ignore=ignore, replace=replace).last_rowid
                    )
                return list(
                    table.rows_where(
                        "rowid in ({})".format(",".join("?" for _ in rowids)),
                        rowids,
                    )
                )
            else:
                table.insert_all(rows, ignore=ignore, replace=replace)

        try:
            rows = await db.execute_write_fn(insert_rows)
        except Exception as e:
            return _error([str(e)])
        result = {"ok": True}
        if should_return:
            result["rows"] = rows
        return Response.json(result, status=201)


class TableDropView(BaseView):
    name = "table-drop"

    def __init__(self, datasette):
        self.ds = datasette

    async def post(self, request):
        try:
            resolved = await self.ds.resolve_table(request)
        except NotFound as e:
            return _error([e.args[0]], 404)
        db = resolved.db
        database_name = db.name
        table_name = resolved.table
        # Table must exist
        db = self.ds.get_database(database_name)
        if not await db.table_exists(table_name):
            return _error(["Table not found: {}".format(table_name)], 404)
        if not await self.ds.permission_allowed(
            request.actor, "drop-table", resource=(database_name, table_name)
        ):
            return _error(["Permission denied"], 403)
        if not db.is_mutable:
            return _error(["Database is immutable"], 403)
        confirm = False
        try:
            data = json.loads(await request.post_body())
            confirm = data.get("confirm")
        except json.JSONDecodeError:
            pass

        if not confirm:
            return Response.json(
                {
                    "ok": True,
                    "database": database_name,
                    "table": table_name,
                    "row_count": (
                        await db.execute("select count(*) from [{}]".format(table_name))
                    ).single_value(),
                    "message": 'Pass "confirm": true to confirm',
                },
                status=200,
            )

        # Drop table
        def drop_table(conn):
            sqlite_utils.Database(conn)[table_name].drop()

        await db.execute_write_fn(drop_table)
        return Response.json({"ok": True}, status=200)
@@ -333,6 +333,66 @@ To limit this ability for just one specific database, use this:
    }
}

.. _CreateTokenView:

API Tokens
==========

Datasette includes a default mechanism for generating API tokens that can be used to authenticate requests.

Authenticated users can create new API tokens using a form on the ``/-/create-token`` page.

Created tokens can then be passed in the ``Authorization: Bearer $token`` header of HTTP requests to Datasette.

A token created by a user will include that user's ``"id"`` in the token payload, so any permissions granted to that user based on their ID can be made available to the token as well.

Coming soon: a mechanism for creating tokens that can only perform a specified subset of the actions available to the user who created them.

When one of these tokens accompanies a request, the actor for that request will have the following shape:

.. code-block:: json

    {
        "id": "user_id",
        "token": "dstok",
        "token_expires": 1667717426
    }

The ``"id"`` field duplicates the ID of the actor who first created the token.

The ``"token"`` field identifies that this actor was authenticated using a Datasette signed token (``dstok``).

The ``"token_expires"`` field, if present, indicates that the token will expire after that integer timestamp.

The ``/-/create-token`` page cannot be accessed by actors that are authenticated with a ``"token": "some-value"`` property. This is to prevent API tokens from being used to create more tokens.

Datasette plugins that implement their own form of API token authentication should follow this convention.

You can disable the signed token feature entirely using the :ref:`allow_signed_tokens <setting_allow_signed_tokens>` setting.
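
As an illustration (a minimal sketch, not part of the official docs - the port and the token value are assumptions), a token can be exercised from Python using ``httpx``. The ``/-/actor.json`` endpoint echoes back the actor decoded from the token:

.. code-block:: python

    import httpx

    # Assumed: a token previously created at /-/create-token
    token = "dstok_..."

    response = httpx.get(
        "http://127.0.0.1:8001/-/actor.json",
        headers={"Authorization": "Bearer {}".format(token)},
    )
    # Prints something like {"actor": {"id": "root", "token": "dstok"}}
    print(response.json())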
.. _authentication_cli_create_token:

datasette create-token
----------------------

You can also create tokens on the command line using the ``datasette create-token`` command.

This command takes one required argument - the ID of the actor to be associated with the created token.

You can specify an ``--expires-after`` option in seconds. If omitted, the token will never expire.

The command will sign the token using the ``DATASETTE_SECRET`` environment variable, if available. You can also pass the secret using the ``--secret`` option.

This means you can run the command locally to create tokens for use with a deployed Datasette instance, provided you know that instance's secret.

To create a token for the ``root`` actor that will expire in one hour::

    datasette create-token root --expires-after 3600

To create a token that never expires, signed with a specific secret::

    datasette create-token root --secret my-secret-goes-here

.. _permissions_plugins:

Checking permissions in plugins
@@ -505,6 +565,66 @@ Actor is allowed to view (and execute) a :ref:`canned query <canned_queries>` pa

Default *allow*.

.. _permissions_insert_row:

insert-row
----------

Actor is allowed to insert rows into a table.

``resource`` - tuple: (string, string)
    The name of the database, then the name of the table

Default *deny*.

.. _permissions_delete_row:

delete-row
----------

Actor is allowed to delete rows from a table.

``resource`` - tuple: (string, string)
    The name of the database, then the name of the table

Default *deny*.

.. _permissions_update_row:

update-row
----------

Actor is allowed to update rows in a table.

``resource`` - tuple: (string, string)
    The name of the database, then the name of the table

Default *deny*.

.. _permissions_create_table:

create-table
------------

Actor is allowed to create a database table.

``resource`` - string
    The name of the database

Default *deny*.

.. _permissions_drop_table:

drop-table
----------

Actor is allowed to drop a database table.

``resource`` - tuple: (string, string)
    The name of the database, then the name of the table

Default *deny*.
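
As a hedged illustration of how these write permissions compose with the plugin system (the actor ID, database and table names here are hypothetical), a plugin could grant them for a single table using the ``permission_allowed`` hook:

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def permission_allowed(actor, action, resource):
        # Hypothetical policy: the "data-bot" actor may write to
        # the "docs" table in the "data" database.
        if (
            actor
            and actor.get("id") == "data-bot"
            and action in ("insert-row", "update-row", "delete-row")
            and resource == ("data", "docs")
        ):
            return True
        # Returning None leaves the decision to other plugins and defaults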
.. _permissions_execute_sql:

execute-sql

@@ -4,6 +4,35 @@
Changelog
=========

.. _v1_0_a0:

1.0a0 (2022-11-29)
------------------

This first alpha release of Datasette 1.0 introduces a brand new collection of APIs for writing to the database (:issue:`1850`), as well as a new API token mechanism baked into Datasette core. Previously, API tokens were only supported by installing additional plugins.

This is very much a preview: expect many more backwards incompatible API changes prior to the full 1.0 release.

Feedback enthusiastically welcomed, either through `issue comments <https://github.com/simonw/datasette/issues/1850>`__ or via the `Datasette Discord <https://datasette.io/discord>`__ community.

Signed API tokens
~~~~~~~~~~~~~~~~~

- New ``/-/create-token`` page allowing authenticated users to create signed API tokens that can act on their behalf, see :ref:`CreateTokenView`. (:issue:`1852`)
- New ``datasette create-token`` command for creating tokens from the command line: :ref:`authentication_cli_create_token`.
- New :ref:`setting_allow_signed_tokens` setting which can be used to turn off signed token support. (:issue:`1856`)
- New :ref:`setting_max_signed_tokens_ttl` setting for restricting the maximum allowed duration of a signed token. (:issue:`1858`)

Write API
~~~~~~~~~

- New API explorer at ``/-/api`` for trying out the API. (:issue:`1871`)
- ``/db/-/create`` API for :ref:`TableCreateView`. (:issue:`1882`)
- ``/db/table/-/insert`` API for :ref:`TableInsertView`. (:issue:`1851`)
- ``/db/table/-/drop`` API for :ref:`TableDropView`. (:issue:`1874`)
- ``/db/table/pk/-/update`` API for :ref:`RowUpdateView`. (:issue:`1863`)
- ``/db/table/pk/-/delete`` API for :ref:`RowDeleteView`. (:issue:`1864`)

.. _v0_63_2:

0.63.2 (2022-11-18)

@@ -47,13 +47,14 @@ Running ``datasette --help`` shows a list of all of the available commands.
      --help     Show this message and exit.

    Commands:
      serve*        Serve up specified SQLite database files with a web UI
      create-token  Create a signed API token for the specified actor ID
      inspect       Generate JSON summary of provided database files
      install       Install plugins and packages from PyPI into the same...
      package       Package SQLite files into a Datasette Docker container
      plugins       List currently installed plugins
      publish       Publish specified SQLite database files to the internet...
      uninstall     Uninstall plugins and Python packages from the Datasette...

.. [[[end]]]

@@ -212,6 +213,8 @@ These can be passed to ``datasette serve`` using ``datasette serve --setting nam
                                  (default=100)
      max_returned_rows           Maximum rows that can be returned from a table or
                                  custom query (default=1000)
      max_insert_rows             Maximum rows that can be inserted at a time using
                                  the bulk insert API (default=100)
      num_sql_threads             Number of threads in the thread pool for
                                  executing SQLite queries (default=3)
      sql_time_limit_ms           Time limit for a SQL query in milliseconds
@@ -226,6 +229,10 @@ These can be passed to ``datasette serve`` using ``datasette serve --setting nam
                                  ?_facet= parameter (default=True)
      allow_download              Allow users to download the original SQLite
                                  database files (default=True)
      allow_signed_tokens         Allow users to create and use signed API tokens
                                  (default=True)
      max_signed_tokens_ttl       Maximum allowed expiry time for signed API tokens
                                  (default=0)
      suggest_facets              Calculate and display suggested facets
                                  (default=True)
      default_cache_ttl           Default HTTP cache TTL (used in Cache-Control:
@@ -591,3 +598,31 @@ This performance optimization is used automatically by some of the ``datasette p

.. [[[end]]]


.. _cli_help_create_token___help:

datasette create-token
======================

Create a signed API token, see :ref:`authentication_cli_create_token`.

.. [[[cog
    help(["create-token", "--help"])
.. ]]]

::

    Usage: datasette create-token [OPTIONS] ID

      Create a signed API token for the specified actor ID

    Options:
      --secret TEXT                Secret used for signing the API tokens
                                   [required]
      -e, --expires-after INTEGER  Token should expire after this many seconds
      --debug                      Show decoded token
      --help                       Show this message and exit.

.. [[[end]]]
@@ -579,6 +579,84 @@ For example:

    downloads_are_allowed = datasette.setting("allow_download")

.. _datasette_resolve_database:

.resolve_database(request)
--------------------------

``request`` - :ref:`internals_request`
    A request object

If you are implementing your own custom views, you may need to resolve the database that the user is requesting based on a URL path. If the regular expression for your route declares a ``database`` named group, you can use this method to resolve the database object.

This returns a :ref:`Database <internals_database>` instance.

If the database cannot be found, it raises a ``datasette.utils.asgi.DatabaseNotFound`` exception - which is a subclass of ``datasette.utils.asgi.NotFound`` with a ``.database_name`` attribute set to the name of the database that was requested.

.. _datasette_resolve_table:

.resolve_table(request)
-----------------------

``request`` - :ref:`internals_request`
    A request object

This assumes that the regular expression for your route declares both a ``database`` and a ``table`` named group.

It returns a ``ResolvedTable`` named tuple instance with the following fields:

``db`` - :ref:`Database <internals_database>`
    The database object

``table`` - string
    The name of the table (or view)

``is_view`` - boolean
    ``True`` if this is a view, ``False`` if it is a table

If the database or table cannot be found it raises a ``datasette.utils.asgi.DatabaseNotFound`` exception.

If the table does not exist it raises a ``datasette.utils.asgi.TableNotFound`` exception - a subclass of ``datasette.utils.asgi.NotFound`` with ``.database_name`` and ``.table`` attributes.

.. _datasette_resolve_row:

.resolve_row(request)
---------------------

``request`` - :ref:`internals_request`
    A request object

This method assumes your route declares named groups for ``database``, ``table`` and ``pks``.

It returns a ``ResolvedRow`` named tuple instance with the following fields:

``db`` - :ref:`Database <internals_database>`
    The database object

``table`` - string
    The name of the table

``sql`` - string
    SQL snippet that can be used in a ``WHERE`` clause to select the row

``params`` - dict
    Parameters that should be passed to the SQL query

``pks`` - list
    List of primary key column names

``pk_values`` - list
    List of primary key values decoded from the URL

``row`` - ``sqlite3.Row``
    The row itself

If the database or table cannot be found it raises a ``datasette.utils.asgi.DatabaseNotFound`` exception.

If the table does not exist it raises a ``datasette.utils.asgi.TableNotFound`` exception.

If the row cannot be found it raises a ``datasette.utils.asgi.RowNotFound`` exception. This has ``.database_name``, ``.table`` and ``.pk_values`` attributes, extracted from the request path.
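
As a minimal sketch of how these methods might be used together (the route, table and ``title`` column are assumptions for illustration, not part of the documented API):

.. code-block:: python

    from datasette import hookimpl
    from datasette.utils.asgi import Response


    async def row_title(datasette, request):
        # resolve_row() uses the database/table/pks named groups from the route
        resolved = await datasette.resolve_row(request)
        # resolved.row is an sqlite3.Row for the matched primary key(s)
        return Response.text(str(resolved.row["title"]))


    @hookimpl
    def register_routes():
        return [
            (
                r"^/(?P<database>[^/]+)/(?P<table>[^/]+)/(?P<pks>[^/]+)/-/title$",
                row_title,
            )
        ]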

.. _internals_datasette_client:

datasette.client
@@ -770,7 +848,7 @@ The ``Results`` object also has the following properties and methods:
``.columns`` - list of strings
    A list of column names returned by the query.

``.rows`` - list of ``sqlite3.Row``
    This property provides direct access to the list of rows returned by the database. You can access specific rows by index using ``results.rows[0]``.

``.first()`` - row or None
@@ -909,6 +987,9 @@ The ``Database`` class also provides properties and methods for introspecting th
``await db.table_exists(table)`` - boolean
    Check if a table called ``table`` exists.

``await db.view_exists(view)`` - boolean
    Check if a view called ``view`` exists.

``await db.table_names()`` - list of strings
    List of names of tables in the database.
@@ -415,7 +415,9 @@ column - you can turn that off using ``?_labels=off``.
You can request foreign keys be expanded in JSON using the ``_labels=on`` or
``_label=COLUMN`` special query string parameters. Here's what an expanded row
looks like:

.. code-block:: json

    [
        {
@@ -455,3 +457,307 @@ You can find this near the top of the source code of those pages, looking like t
The JSON URL is also made available in a ``Link`` HTTP header for the page::

    Link: https://latest.datasette.io/fixtures/sortable.json; rel="alternate"; type="application/json+datasette"

.. _json_api_write:

The JSON write API
------------------

Datasette provides a write API for JSON data. This is a POST-only API that requires an authenticated API token, see :ref:`CreateTokenView`.

.. _TableInsertView:

Inserting rows
~~~~~~~~~~~~~~

This requires the :ref:`permissions_insert_row` permission.

A single row can be inserted using the ``"row"`` key:

::

    POST /<database>/<table>/-/insert
    Content-Type: application/json
    Authorization: Bearer dstok_<rest-of-token>

.. code-block:: json

    {
        "row": {
            "column1": "value1",
            "column2": "value2"
        }
    }

If successful, this will return a ``201`` status code and the newly inserted row, for example:

.. code-block:: json

    {
        "rows": [
            {
                "id": 1,
                "column1": "value1",
                "column2": "value2"
            }
        ]
    }

To insert multiple rows at a time, use the same API method but send a list of dictionaries as the ``"rows"`` key:

::

    POST /<database>/<table>/-/insert
    Content-Type: application/json
    Authorization: Bearer dstok_<rest-of-token>

.. code-block:: json

    {
        "rows": [
            {
                "column1": "value1",
                "column2": "value2"
            },
            {
                "column1": "value3",
                "column2": "value4"
            }
        ]
    }

If successful, this will return a ``201`` status code and an empty ``{}`` response body.

To return the newly inserted rows, add the ``"return": true`` key to the request body:

.. code-block:: json

    {
        "rows": [
            {
                "column1": "value1",
                "column2": "value2"
            },
            {
                "column1": "value3",
                "column2": "value4"
            }
        ],
        "return": true
    }

This will return the same ``"rows"`` key as the single row example above. There is a small performance penalty for using this option.
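
For illustration, here is a minimal sketch of calling this endpoint from Python with ``httpx`` (the ``data`` database, ``docs`` table and token value are assumptions, not part of the API contract):

.. code-block:: python

    import httpx

    token = "dstok_..."  # created at /-/create-token or with datasette create-token

    response = httpx.post(
        "http://127.0.0.1:8001/data/docs/-/insert",
        json={"rows": [{"title": "One"}, {"title": "Two"}], "return": True},
        headers={"Authorization": "Bearer {}".format(token)},
    )
    assert response.status_code == 201
    print(response.json()["rows"])  # the newly inserted rows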

.. _RowUpdateView:

Updating a row
~~~~~~~~~~~~~~

To update a row, make a ``POST`` to ``/<database>/<table>/<row-pks>/-/update``. This requires the :ref:`permissions_update_row` permission.

::

    POST /<database>/<table>/<row-pks>/-/update
    Content-Type: application/json
    Authorization: Bearer dstok_<rest-of-token>

.. code-block:: json

    {
        "update": {
            "text_column": "New text string",
            "integer_column": 3,
            "float_column": 3.14
        }
    }

``<row-pks>`` here is the :ref:`tilde-encoded <internals_tilde_encoding>` primary key value of the row to update - or a comma-separated list of primary key values if the table has a composite primary key.

You only need to pass the columns you want to update. Any other columns will be left unchanged.

If successful, this will return a ``200`` status code and a ``{"ok": true}`` response body.

Add ``"return": true`` to the request body to return the updated row:

.. code-block:: json

    {
        "update": {
            "title": "New title"
        },
        "return": true
    }

The returned JSON will look like this:

.. code-block:: json

    {
        "ok": true,
        "row": {
            "id": 1,
            "title": "New title",
            "other_column": "Will be present here too"
        }
    }

Any errors will return ``{"errors": ["... descriptive message ..."], "ok": false}``, and a ``400`` status code for a bad input or a ``403`` status code for an authentication or permission error.
.. _RowDeleteView:

Deleting a row
~~~~~~~~~~~~~~

To delete a row, make a ``POST`` to ``/<database>/<table>/<row-pks>/-/delete``. This requires the :ref:`permissions_delete_row` permission.

::

    POST /<database>/<table>/<row-pks>/-/delete
    Content-Type: application/json
    Authorization: Bearer dstok_<rest-of-token>

``<row-pks>`` here is the :ref:`tilde-encoded <internals_tilde_encoding>` primary key value of the row to delete - or a comma-separated list of primary key values if the table has a composite primary key.

If successful, this will return a ``200`` status code and a ``{"ok": true}`` response body.

Any errors will return ``{"errors": ["... descriptive message ..."], "ok": false}``, and a ``400`` status code for a bad input or a ``403`` status code for an authentication or permission error.
.. _TableCreateView:

Creating a table
~~~~~~~~~~~~~~~~

To create a table, make a ``POST`` to ``/<database>/-/create``. This requires the :ref:`permissions_create_table` permission.

::

    POST /<database>/-/create
    Content-Type: application/json
    Authorization: Bearer dstok_<rest-of-token>

.. code-block:: json

    {
        "table": "name_of_new_table",
        "columns": [
            {
                "name": "id",
                "type": "integer"
            },
            {
                "name": "title",
                "type": "text"
            }
        ],
        "pk": "id"
    }

The JSON here describes the table that will be created:

* ``table`` is the name of the table to create. This field is required.
* ``columns`` is a list of columns to create. Each column is a dictionary with ``name`` and ``type`` keys.

  - ``name`` is the name of the column. This is required.
  - ``type`` is the type of the column. This is optional - if not provided, ``text`` will be assumed. The valid types are ``text``, ``integer``, ``float`` and ``blob``.

* ``pk`` is the primary key for the table. This is optional - if not provided, Datasette will create a SQLite table with a hidden ``rowid`` column.

  If the primary key is an integer column, it will be configured to automatically increment for each new record.

  If you set this to ``id`` without including an ``id`` column in the list of ``columns``, Datasette will create an integer ID column for you.

* ``pks`` can be used instead of ``pk`` to create a compound primary key. It should be a JSON list of column names to use in that primary key.

If the table is successfully created this will return a ``201`` status code and the following response:

.. code-block:: json

    {
        "ok": true,
        "database": "data",
        "table": "name_of_new_table",
        "table_url": "http://127.0.0.1:8001/data/name_of_new_table",
        "table_api_url": "http://127.0.0.1:8001/data/name_of_new_table.json",
        "schema": "CREATE TABLE [name_of_new_table] (\n [id] INTEGER PRIMARY KEY,\n [title] TEXT\n)"
    }
.. _TableCreateView_example:

Creating a table from example data
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Instead of specifying ``columns`` directly you can instead pass a single example row or a list of rows. Datasette will create a table with a schema that matches those rows and insert them for you:

::

    POST /<database>/-/create
    Content-Type: application/json
    Authorization: Bearer dstok_<rest-of-token>

.. code-block:: json

    {
        "table": "creatures",
        "rows": [
            {
                "id": 1,
                "name": "Tarantula"
            },
            {
                "id": 2,
                "name": "Kākāpō"
            }
        ],
        "pk": "id"
    }

The ``201`` response here will be similar to the ``columns`` form, but will also include the number of rows that were inserted as ``row_count``:

.. code-block:: json

    {
        "ok": true,
        "database": "data",
        "table": "creatures",
        "table_url": "http://127.0.0.1:8001/data/creatures",
        "table_api_url": "http://127.0.0.1:8001/data/creatures.json",
        "schema": "CREATE TABLE [creatures] (\n [id] INTEGER PRIMARY KEY,\n [name] TEXT\n)",
        "row_count": 2
    }
.. _TableDropView:

Dropping tables
~~~~~~~~~~~~~~~

To drop a table, make a ``POST`` to ``/<database>/<table>/-/drop``. This requires the :ref:`permissions_drop_table` permission.

::

    POST /<database>/<table>/-/drop
    Content-Type: application/json
    Authorization: Bearer dstok_<rest-of-token>

Without a POST body this will return a status ``200`` with a note about how many rows will be deleted:

.. code-block:: json

    {
        "ok": true,
        "database": "<database>",
        "table": "<table>",
        "row_count": 5,
        "message": "Pass \"confirm\": true to confirm"
    }

If you pass the following POST body:

.. code-block:: json

    {
        "confirm": true
    }

Then the table will be dropped and a status ``200`` response of ``{"ok": true}`` will be returned.

Any errors will return ``{"errors": ["... descriptive message ..."], "ok": false}``, and a ``400`` status code for a bad input or a ``403`` status code for an authentication or permission error.
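
The two-step confirmation can be scripted; here is a sketch using ``httpx`` (the database and table names are hypothetical):

.. code-block:: python

    import httpx

    token = "dstok_..."
    url = "http://127.0.0.1:8001/data/docs/-/drop"
    headers = {"Authorization": "Bearer {}".format(token)}

    # First call previews the row_count without dropping anything
    preview = httpx.post(url, headers=headers).json()
    print("About to delete {} rows".format(preview["row_count"]))

    # Second call confirms and actually drops the table
    result = httpx.post(url, json={"confirm": True}, headers=headers)
    assert result.json() == {"ok": True}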

@@ -151,7 +151,10 @@ If you run ``datasette plugins --all`` it will include default plugins that ship
            "templates": false,
            "version": null,
            "hooks": [
                "actor_from_request",
                "permission_allowed",
                "register_commands",
                "skip_csrf"
            ]
        },
        {

@@ -96,6 +96,17 @@ You can increase or decrease this limit like so::

    datasette mydatabase.db --setting max_returned_rows 2000

.. _setting_max_insert_rows:

max_insert_rows
~~~~~~~~~~~~~~~

Maximum rows that can be inserted at a time using the bulk insert API, see :ref:`TableInsertView`. Defaults to 100.

You can increase or decrease this limit like so::

    datasette mydatabase.db --setting max_insert_rows 1000

.. _setting_num_sql_threads:

num_sql_threads
@@ -169,6 +180,34 @@ Should users be able to download the original SQLite database using a link on th

    datasette mydatabase.db --setting allow_download off

.. _setting_allow_signed_tokens:

allow_signed_tokens
~~~~~~~~~~~~~~~~~~~

Should users be able to create signed API tokens to access Datasette?

This is turned on by default. Use the following to turn it off::

    datasette mydatabase.db --setting allow_signed_tokens off

Turning this setting off will disable the ``/-/create-token`` page, :ref:`described here <CreateTokenView>`. It will also cause any incoming ``Authorization: Bearer dstok_...`` API tokens to be ignored.

.. _setting_max_signed_tokens_ttl:

max_signed_tokens_ttl
~~~~~~~~~~~~~~~~~~~~~

Maximum allowed expiry time for signed API tokens created by users.

Defaults to ``0`` which means no limit - tokens can be created that will never expire.

Set this to a value in seconds to limit the maximum expiry time. For example, to set that limit to 24 hours you would use::

    datasette mydatabase.db --setting max_signed_tokens_ttl 86400

This setting is enforced when incoming tokens are processed.

.. _setting_default_cache_ttl:

default_cache_ttl

setup.py

@@ -57,6 +57,7 @@ setup(
        "PyYAML>=5.3",
        "mergedeep>=1.1.1",
        "itsdangerous>=1.1",
        "sqlite-utils>=3.30",
    ],
    entry_points="""
        [console_scripts]
@@ -129,10 +129,14 @@ def make_app_client(
        for sql, params in TABLE_PARAMETERIZED_SQL:
            with conn:
                conn.execute(sql, params)
        # Close the connection to avoid "too many open files" errors
        conn.close()
        if extra_databases is not None:
            for extra_filename, extra_sql in extra_databases.items():
                extra_filepath = os.path.join(tmpdir, extra_filename)
                c2 = sqlite3.connect(extra_filepath)
                c2.executescript(extra_sql)
                c2.close()
                # Insert at start to help test /-/databases ordering:
                files.insert(0, extra_filepath)
        os.chdir(os.path.dirname(filepath))
@@ -808,8 +808,11 @@ def test_settings_json(app_client):
        "facet_suggest_time_limit_ms": 50,
        "facet_time_limit_ms": 200,
        "max_returned_rows": 100,
        "max_insert_rows": 100,
        "sql_time_limit_ms": 200,
        "allow_download": True,
        "allow_signed_tokens": True,
        "max_signed_tokens_ttl": 0,
        "allow_facet": True,
        "suggest_facets": True,
        "default_cache_ttl": 5,
@@ -0,0 +1,924 @@
from datasette.app import Datasette
from datasette.utils import sqlite3
import pytest
import time


@pytest.fixture
def ds_write(tmp_path_factory):
    db_directory = tmp_path_factory.mktemp("dbs")
    db_path = str(db_directory / "data.db")
    db_path_immutable = str(db_directory / "immutable.db")
    db1 = sqlite3.connect(str(db_path))
    db2 = sqlite3.connect(str(db_path_immutable))
    for db in (db1, db2):
        db.execute("vacuum")
        db.execute(
            "create table docs (id integer primary key, title text, score float, age integer)"
        )
    ds = Datasette([db_path], immutables=[db_path_immutable])
    yield ds
    db1.close()
    db2.close()


def write_token(ds, actor_id="root"):
    return "dstok_{}".format(
        ds.sign(
            {"a": actor_id, "token": "dstok", "t": int(time.time())}, namespace="token"
        )
    )


@pytest.mark.asyncio
async def test_write_row(ds_write):
    token = write_token(ds_write)
    response = await ds_write.client.post(
        "/data/docs/-/insert",
        json={"row": {"title": "Test", "score": 1.2, "age": 5}},
        headers={
            "Authorization": "Bearer {}".format(token),
            "Content-Type": "application/json",
        },
    )
    expected_row = {"id": 1, "title": "Test", "score": 1.2, "age": 5}
    assert response.status_code == 201
    assert response.json()["rows"] == [expected_row]
    rows = (await ds_write.get_database("data").execute("select * from docs")).rows
    assert dict(rows[0]) == expected_row


@pytest.mark.asyncio
@pytest.mark.parametrize("return_rows", (True, False))
async def test_write_rows(ds_write, return_rows):
    token = write_token(ds_write)
    data = {
        "rows": [
            {"title": "Test {}".format(i), "score": 1.0, "age": 5} for i in range(20)
        ]
    }
    if return_rows:
        data["return"] = True
    response = await ds_write.client.post(
        "/data/docs/-/insert",
        json=data,
        headers={
            "Authorization": "Bearer {}".format(token),
            "Content-Type": "application/json",
        },
    )
    assert response.status_code == 201
    actual_rows = [
        dict(r)
        for r in (
            await ds_write.get_database("data").execute("select * from docs")
        ).rows
    ]
    assert len(actual_rows) == 20
    assert actual_rows == [
        {"id": i + 1, "title": "Test {}".format(i), "score": 1.0, "age": 5}
        for i in range(20)
    ]
    assert response.json()["ok"] is True
    if return_rows:
        assert response.json()["rows"] == actual_rows
@pytest.mark.asyncio
@pytest.mark.parametrize(
    "path,input,special_case,expected_status,expected_errors",
    (
        (
            "/data2/docs/-/insert",
            {},
            None,
            404,
            ["Database not found: data2"],
        ),
        (
            "/data/docs2/-/insert",
            {},
            None,
            404,
            ["Table not found: docs2"],
        ),
        (
            "/data/docs/-/insert",
            {"rows": [{"title": "Test"} for i in range(10)]},
            "bad_token",
            403,
            ["Permission denied"],
        ),
        (
            "/data/docs/-/insert",
            {},
            "invalid_json",
            400,
            [
                "Invalid JSON: Expecting property name enclosed in double quotes: line 1 column 2 (char 1)"
            ],
        ),
        (
            "/data/docs/-/insert",
            {},
            "invalid_content_type",
            400,
            ["Invalid content-type, must be application/json"],
        ),
        (
            "/data/docs/-/insert",
            [],
            None,
            400,
            ["JSON must be a dictionary"],
        ),
        (
            "/data/docs/-/insert",
            {"row": "blah"},
            None,
            400,
            ['"row" must be a dictionary'],
        ),
        (
            "/data/docs/-/insert",
            {"blah": "blah"},
            None,
            400,
            ['JSON must have one or other of "row" or "rows"'],
        ),
        (
            "/data/docs/-/insert",
            {"rows": "blah"},
            None,
            400,
            ['"rows" must be a list'],
        ),
        (
            "/data/docs/-/insert",
            {"rows": ["blah"]},
            None,
            400,
            ['"rows" must be a list of dictionaries'],
        ),
        (
            "/data/docs/-/insert",
            {"rows": [{"title": "Test"} for i in range(101)]},
            None,
            400,
            ["Too many rows, maximum allowed is 100"],
        ),
        (
            "/data/docs/-/insert",
            {"rows": [{"id": 1, "title": "Test"}]},
            "duplicate_id",
            400,
            ["UNIQUE constraint failed: docs.id"],
        ),
        (
            "/data/docs/-/insert",
            {"rows": [{"title": "Test"}], "ignore": True, "replace": True},
            None,
            400,
            ['Cannot use "ignore" and "replace" at the same time'],
        ),
        (
            "/data/docs/-/insert",
            {"rows": [{"title": "Test"}], "invalid_param": True},
            None,
            400,
            ['Invalid parameter: "invalid_param"'],
        ),
        (
            "/data/docs/-/insert",
            {"rows": [{"title": "Test"}], "one": True, "two": True},
            None,
            400,
            ['Invalid parameter: "one", "two"'],
        ),
        # Validate columns of each row
        (
            "/data/docs/-/insert",
            {"rows": [{"title": "Test", "bad": 1, "worse": 2} for i in range(2)]},
            None,
            400,
            [
                "Row 0 has invalid columns: bad, worse",
                "Row 1 has invalid columns: bad, worse",
            ],
        ),
    ),
)
async def test_write_row_errors(
    ds_write, path, input, special_case, expected_status, expected_errors
):
    token = write_token(ds_write)
    if special_case == "duplicate_id":
        await ds_write.get_database("data").execute_write(
            "insert into docs (id) values (1)"
        )
    if special_case == "bad_token":
        token += "bad"
    kwargs = dict(
        json=input,
        headers={
            "Authorization": "Bearer {}".format(token),
            "Content-Type": "text/plain"
            if special_case == "invalid_content_type"
            else "application/json",
        },
    )
    if special_case == "invalid_json":
        del kwargs["json"]
        kwargs["content"] = "{bad json"
    response = await ds_write.client.post(
        path,
        **kwargs,
    )
    assert response.status_code == expected_status
    assert response.json()["ok"] is False
    assert response.json()["errors"] == expected_errors
@pytest.mark.asyncio
@pytest.mark.parametrize(
    "ignore,replace,expected_rows",
    (
        (
            True,
            False,
            [
                {"id": 1, "title": "Exists", "score": None, "age": None},
            ],
        ),
        (
            False,
            True,
            [
                {"id": 1, "title": "One", "score": None, "age": None},
            ],
        ),
    ),
)
@pytest.mark.parametrize("should_return", (True, False))
async def test_insert_ignore_replace(
    ds_write, ignore, replace, expected_rows, should_return
):
    await ds_write.get_database("data").execute_write(
        "insert into docs (id, title) values (1, 'Exists')"
    )
    token = write_token(ds_write)
    data = {"rows": [{"id": 1, "title": "One"}]}
    if ignore:
        data["ignore"] = True
    if replace:
        data["replace"] = True
    if should_return:
        data["return"] = True
    response = await ds_write.client.post(
        "/data/docs/-/insert",
        json=data,
        headers={
            "Authorization": "Bearer {}".format(token),
            "Content-Type": "application/json",
        },
    )
    assert response.status_code == 201
    actual_rows = [
        dict(r)
        for r in (
            await ds_write.get_database("data").execute("select * from docs")
        ).rows
    ]
    assert actual_rows == expected_rows
    assert response.json()["ok"] is True
    if should_return:
        assert response.json()["rows"] == expected_rows
async def _insert_row(ds):
    insert_response = await ds.client.post(
        "/data/docs/-/insert",
        json={"row": {"title": "Row one", "score": 1.2, "age": 5}, "return": True},
        headers={
            "Authorization": "Bearer {}".format(write_token(ds)),
            "Content-Type": "application/json",
        },
    )
    assert insert_response.status_code == 201
    return insert_response.json()["rows"][0]["id"]


@pytest.mark.asyncio
@pytest.mark.parametrize("scenario", ("no_token", "no_perm", "bad_table"))
async def test_delete_row_errors(ds_write, scenario):
    if scenario == "no_token":
        token = "bad_token"
    elif scenario == "no_perm":
        token = write_token(ds_write, actor_id="not-root")
    else:
        token = write_token(ds_write)

    pk = await _insert_row(ds_write)

    path = "/data/{}/{}/-/delete".format(
        "docs" if scenario != "bad_table" else "bad_table", pk
    )
    response = await ds_write.client.post(
        path,
        headers={
            "Authorization": "Bearer {}".format(token),
            "Content-Type": "application/json",
        },
    )
    assert response.status_code == (
        403 if scenario in ("no_token", "no_perm") else 404
    )
    assert response.json()["ok"] is False
    assert response.json()["errors"] == (
        ["Permission denied"]
        if scenario in ("no_token", "no_perm")
        else ["Table not found: bad_table"]
    )
    assert len((await ds_write.client.get("/data/docs.json?_shape=array")).json()) == 1
@pytest.mark.asyncio
@pytest.mark.parametrize(
    "table,row_for_create,pks,delete_path",
    (
        ("rowid_table", {"name": "rowid row"}, None, None),
        ("pk_table", {"id": 1, "name": "ID table"}, "id", "1"),
        (
            "compound_pk_table",
            {"type": "article", "key": "k"},
            ["type", "key"],
            "article,k",
        ),
    ),
)
async def test_delete_row(ds_write, table, row_for_create, pks, delete_path):
    # First create the table with that example row
    create_data = {
        "table": table,
        "row": row_for_create,
    }
    if pks:
        if isinstance(pks, str):
            create_data["pk"] = pks
        else:
            create_data["pks"] = pks
    create_response = await ds_write.client.post(
        "/data/-/create",
        json=create_data,
        headers={
            "Authorization": "Bearer {}".format(write_token(ds_write)),
        },
    )
    assert create_response.status_code == 201, create_response.json()
    # Should be a single row
    assert (
        await ds_write.client.get(
            "/data.json?_shape=arrayfirst&sql=select+count(*)+from+{}".format(table)
        )
    ).json() == [1]
    # Now delete the row
    if delete_path is None:
        # Special case for that rowid table
        delete_path = (
            await ds_write.client.get(
                "/data.json?_shape=arrayfirst&sql=select+rowid+from+{}".format(table)
            )
        ).json()[0]

    delete_response = await ds_write.client.post(
        "/data/{}/{}/-/delete".format(table, delete_path),
        headers={
            "Authorization": "Bearer {}".format(write_token(ds_write)),
        },
    )
    assert delete_response.status_code == 200
    assert (
        await ds_write.client.get(
            "/data.json?_shape=arrayfirst&sql=select+count(*)+from+{}".format(table)
        )
    ).json() == [0]
@pytest.mark.asyncio
@pytest.mark.parametrize("scenario", ("no_token", "no_perm", "bad_table"))
async def test_update_row_check_permission(ds_write, scenario):
    if scenario == "no_token":
        token = "bad_token"
    elif scenario == "no_perm":
        token = write_token(ds_write, actor_id="not-root")
    else:
        token = write_token(ds_write)

    pk = await _insert_row(ds_write)

    path = "/data/{}/{}/-/update".format(
        "docs" if scenario != "bad_table" else "bad_table", pk
    )

    response = await ds_write.client.post(
        path,
        json={"update": {"title": "New title"}},
        headers={
            "Authorization": "Bearer {}".format(token),
            "Content-Type": "application/json",
        },
    )
    assert response.status_code == (404 if scenario == "bad_table" else 403)
    assert response.json()["ok"] is False
    if scenario == "bad_table":
        assert response.json()["errors"] == ["Table not found: bad_table"]
    else:
        assert response.json()["errors"] == ["Permission denied"]


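# Updates touch only the supplied columns; with "return": true the response echoes
# the full updated row, otherwise no "row" key is present at all.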
@pytest.mark.asyncio
@pytest.mark.parametrize(
    "input,expected_errors",
    (
        ({"title": "New title"}, None),
        ({"title": None}, None),
        ({"score": 1.6}, None),
        ({"age": 10}, None),
        ({"title": "New title", "score": 1.6}, None),
        ({"title2": "New title"}, ["no such column: title2"]),
    ),
)
@pytest.mark.parametrize("use_return", (True, False))
async def test_update_row(ds_write, input, expected_errors, use_return):
    token = write_token(ds_write)
    pk = await _insert_row(ds_write)

    path = "/data/docs/{}/-/update".format(pk)

    data = {"update": input}
    if use_return:
        data["return"] = True

    response = await ds_write.client.post(
        path,
        json=data,
        headers={
            "Authorization": "Bearer {}".format(token),
            "Content-Type": "application/json",
        },
    )
    if expected_errors:
        assert response.status_code == 400
        assert response.json()["ok"] is False
        assert response.json()["errors"] == expected_errors
        return

    assert response.json()["ok"] is True
    if not use_return:
        assert "row" not in response.json()
    else:
        returned_row = response.json()["row"]
        assert returned_row["id"] == pk
        for k, v in input.items():
            assert returned_row[k] == v

    # And fetch the row to check it's updated
    response = await ds_write.client.get(
        "/data/docs/{}.json?_shape=array".format(pk),
    )
    assert response.status_code == 200
    row = response.json()[0]
    assert row["id"] == pk
    for k, v in input.items():
        assert row[k] == v


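# Dropping a table is a two-step flow: the first POST only returns the table details
# and a confirmation message; the drop happens once "confirm": true is POSTed.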
@pytest.mark.asyncio
@pytest.mark.parametrize(
    "scenario", ("no_token", "no_perm", "bad_table", "has_perm", "immutable")
)
async def test_drop_table(ds_write, scenario):
    if scenario == "no_token":
        token = "bad_token"
    elif scenario == "no_perm":
        token = write_token(ds_write, actor_id="not-root")
    else:
        token = write_token(ds_write)
    should_work = scenario == "has_perm"
    await ds_write.get_database("data").execute_write(
        "insert into docs (id, title) values (1, 'Row 1')"
    )
    path = "/{database}/{table}/-/drop".format(
        database="immutable" if scenario == "immutable" else "data",
        table="docs" if scenario != "bad_table" else "bad_table",
    )
    response = await ds_write.client.post(
        path,
        headers={
            "Authorization": "Bearer {}".format(token),
            "Content-Type": "application/json",
        },
    )
    if not should_work:
        assert response.status_code == (404 if scenario == "bad_table" else 403)
        assert response.json()["ok"] is False
        expected_error = "Permission denied"
        if scenario == "bad_table":
            expected_error = "Table not found: bad_table"
        elif scenario == "immutable":
            expected_error = "Database is immutable"
        assert response.json()["errors"] == [expected_error]
        assert (await ds_write.client.get("/data/docs")).status_code == 200
    else:
        # The first POST returns confirmation details without dropping anything
        assert response.status_code == 200
        assert response.json() == {
            "ok": True,
            "database": "data",
            "table": "docs",
            "row_count": 1,
            "message": 'Pass "confirm": true to confirm',
        }
        assert (await ds_write.client.get("/data/docs")).status_code == 200
        # Now send confirm: true
        response2 = await ds_write.client.post(
            path,
            json={"confirm": True},
            headers={
                "Authorization": "Bearer {}".format(token),
                "Content-Type": "application/json",
            },
        )
        assert response2.json() == {"ok": True}
        assert (await ds_write.client.get("/data/docs")).status_code == 404


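# /data/-/create accepts either an explicit "columns" list or sample "row"/"rows"
# from which the schema is inferred, plus "pk" or "pks"; columns cannot be combined
# with rows, and the two pk forms are mutually exclusive.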
@pytest.mark.asyncio
@pytest.mark.parametrize(
    "input,expected_status,expected_response",
    (
        # Permission error with a bad token
        (
            {"table": "bad", "row": {"id": 1}},
            403,
            {"ok": False, "errors": ["Permission denied"]},
        ),
        # Successful creation with columns:
        (
            {
                "table": "one",
                "columns": [
                    {
                        "name": "id",
                        "type": "integer",
                    },
                    {
                        "name": "title",
                        "type": "text",
                    },
                    {
                        "name": "score",
                        "type": "integer",
                    },
                    {
                        "name": "weight",
                        "type": "float",
                    },
                    {
                        "name": "thumbnail",
                        "type": "blob",
                    },
                ],
                "pk": "id",
            },
            201,
            {
                "ok": True,
                "database": "data",
                "table": "one",
                "table_url": "http://localhost/data/one",
                "table_api_url": "http://localhost/data/one.json",
                "schema": (
                    "CREATE TABLE [one] (\n"
                    "   [id] INTEGER PRIMARY KEY,\n"
                    "   [title] TEXT,\n"
                    "   [score] INTEGER,\n"
                    "   [weight] FLOAT,\n"
                    "   [thumbnail] BLOB\n"
                    ")"
                ),
            },
        ),
        # Successful creation with rows:
        (
            {
                "table": "two",
                "rows": [
                    {
                        "id": 1,
                        "title": "Row 1",
                        "score": 1.5,
                    },
                    {
                        "id": 2,
                        "title": "Row 2",
                        "score": 1.5,
                    },
                ],
                "pk": "id",
            },
            201,
            {
                "ok": True,
                "database": "data",
                "table": "two",
                "table_url": "http://localhost/data/two",
                "table_api_url": "http://localhost/data/two.json",
                "schema": (
                    "CREATE TABLE [two] (\n"
                    "   [id] INTEGER PRIMARY KEY,\n"
                    "   [title] TEXT,\n"
                    "   [score] FLOAT\n"
                    ")"
                ),
                "row_count": 2,
            },
        ),
        # Successful creation with row:
        (
            {
                "table": "three",
                "row": {
                    "id": 1,
                    "title": "Row 1",
                    "score": 1.5,
                },
                "pk": "id",
            },
            201,
            {
                "ok": True,
                "database": "data",
                "table": "three",
                "table_url": "http://localhost/data/three",
                "table_api_url": "http://localhost/data/three.json",
                "schema": (
                    "CREATE TABLE [three] (\n"
                    "   [id] INTEGER PRIMARY KEY,\n"
                    "   [title] TEXT,\n"
                    "   [score] FLOAT\n"
                    ")"
                ),
                "row_count": 1,
            },
        ),
        # Create with row and no primary key
        (
            {
                "table": "four",
                "row": {
                    "name": "Row 1",
                },
            },
            201,
            {
                "ok": True,
                "database": "data",
                "table": "four",
                "table_url": "http://localhost/data/four",
                "table_api_url": "http://localhost/data/four.json",
                "schema": ("CREATE TABLE [four] (\n" "   [name] TEXT\n" ")"),
                "row_count": 1,
            },
        ),
        # Create table with compound primary key
        (
            {
                "table": "five",
                "row": {"type": "article", "key": 123, "title": "Article 1"},
                "pks": ["type", "key"],
            },
            201,
            {
                "ok": True,
                "database": "data",
                "table": "five",
                "table_url": "http://localhost/data/five",
                "table_api_url": "http://localhost/data/five.json",
                "schema": (
                    "CREATE TABLE [five] (\n   [type] TEXT,\n   [key] INTEGER,\n"
                    "   [title] TEXT,\n   PRIMARY KEY ([type], [key])\n)"
                ),
                "row_count": 1,
            },
        ),
        # Error: Table is required
        (
            {
                "row": {"id": 1},
            },
            400,
            {
                "ok": False,
                "errors": ["Table is required"],
            },
        ),
        # Error: Invalid table name
        (
            {
                "table": "sqlite_bad_name",
                "row": {"id": 1},
            },
            400,
            {
                "ok": False,
                "errors": ["Invalid table name"],
            },
        ),
        # Error: JSON must be an object
        (
            [],
            400,
            {
                "ok": False,
                "errors": ["JSON must be an object"],
            },
        ),
        # Error: Cannot specify columns with rows or row
        (
            {
                "table": "bad",
                "columns": [{"name": "id", "type": "integer"}],
                "rows": [{"id": 1}],
            },
            400,
            {
                "ok": False,
                "errors": ["Cannot specify columns with rows or row"],
            },
        ),
        # Error: columns, rows or row is required
        (
            {
                "table": "bad",
            },
            400,
            {
                "ok": False,
                "errors": ["columns, rows or row is required"],
            },
        ),
        # Error: columns must be a list
        (
            {
                "table": "bad",
                "columns": {"name": "id", "type": "integer"},
            },
            400,
            {
                "ok": False,
                "errors": ["columns must be a list"],
            },
        ),
        # Error: columns must be a list of objects
        (
            {
                "table": "bad",
                "columns": ["id"],
            },
            400,
            {
                "ok": False,
                "errors": ["columns must be a list of objects"],
            },
        ),
        # Error: Column name is required
        (
            {
                "table": "bad",
                "columns": [{"type": "integer"}],
            },
            400,
            {
                "ok": False,
                "errors": ["Column name is required"],
            },
        ),
        # Error: Unsupported column type
        (
            {
                "table": "bad",
                "columns": [{"name": "id", "type": "bad"}],
            },
            400,
            {
                "ok": False,
                "errors": ["Unsupported column type: bad"],
            },
        ),
        # Error: Duplicate column name
        (
            {
                "table": "bad",
                "columns": [
                    {"name": "id", "type": "integer"},
                    {"name": "id", "type": "integer"},
                ],
            },
            400,
            {
                "ok": False,
                "errors": ["Duplicate column name: id"],
            },
        ),
        # Error: rows must be a list
        (
            {
                "table": "bad",
                "rows": {"id": 1},
            },
            400,
            {
                "ok": False,
                "errors": ["rows must be a list"],
            },
        ),
        # Error: rows must be a list of objects
        (
            {
                "table": "bad",
                "rows": ["id"],
            },
            400,
            {
                "ok": False,
                "errors": ["rows must be a list of objects"],
            },
        ),
        # Error: pk must be a string
        (
            {
                "table": "bad",
                "row": {"id": 1},
                "pk": 1,
            },
            400,
            {
                "ok": False,
                "errors": ["pk must be a string"],
            },
        ),
        # Error: Cannot specify both pk and pks
        (
            {
                "table": "bad",
                "row": {"id": 1, "name": "Row 1"},
                "pk": "id",
                "pks": ["id", "name"],
            },
            400,
            {
                "ok": False,
                "errors": ["Cannot specify both pk and pks"],
            },
        ),
        # Error: pks must be a list
        (
            {
                "table": "bad",
                "row": {"id": 1, "name": "Row 1"},
                "pks": "id",
            },
            400,
            {
                "ok": False,
                "errors": ["pks must be a list"],
            },
        ),
        # Error: pks must be a list of strings
        (
            {"table": "bad", "row": {"id": 1, "name": "Row 1"}, "pks": [1, 2]},
            400,
            {"ok": False, "errors": ["pks must be a list of strings"]},
        ),
    ),
)
async def test_create_table(ds_write, input, expected_status, expected_response):
    # Special case for expected status of 403
    if expected_status == 403:
        token = "bad_token"
    else:
        token = write_token(ds_write)
    response = await ds_write.client.post(
        "/data/-/create",
        json=input,
        headers={
            "Authorization": "Bearer {}".format(token),
            "Content-Type": "application/json",
        },
    )
    assert response.status_code == expected_status
    data = response.json()
    assert data == expected_response

@@ -1,5 +1,7 @@
from .fixtures import app_client
from click.testing import CliRunner
from datasette.utils import baseconv
from datasette.cli import cli
import pytest
import time

@@ -110,3 +112,180 @@ def test_no_logout_button_in_navigation_if_no_ds_actor_cookie(app_client, path):
    response = app_client.get(path + "?_bot=1")
    assert "<strong>bot</strong>" in response.text
    assert '<form action="/-/logout" method="post">' not in response.text


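# expire_duration is multiplied out to seconds and stored under "d" in the signed
# token: 10 minutes -> 600, 10 hours -> 36000, 3 days -> 259200.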
@pytest.mark.parametrize(
    "post_data,errors,expected_duration",
    (
        ({"expire_type": ""}, [], None),
        ({"expire_type": "x"}, ["Invalid expire duration"], None),
        ({"expire_type": "minutes"}, ["Invalid expire duration"], None),
        (
            {"expire_type": "minutes", "expire_duration": "x"},
            ["Invalid expire duration"],
            None,
        ),
        (
            {"expire_type": "minutes", "expire_duration": "-1"},
            ["Invalid expire duration"],
            None,
        ),
        (
            {"expire_type": "minutes", "expire_duration": "0"},
            ["Invalid expire duration"],
            None,
        ),
        (
            {"expire_type": "minutes", "expire_duration": "10"},
            [],
            600,
        ),
        (
            {"expire_type": "hours", "expire_duration": "10"},
            [],
            10 * 60 * 60,
        ),
        (
            {"expire_type": "days", "expire_duration": "3"},
            [],
            60 * 60 * 24 * 3,
        ),
    ),
)
def test_auth_create_token(app_client, post_data, errors, expected_duration):
    assert app_client.get("/-/create-token").status == 403
    ds_actor = app_client.actor_cookie({"id": "test"})
    response = app_client.get("/-/create-token", cookies={"ds_actor": ds_actor})
    assert response.status == 200
    assert ">Create an API token<" in response.text
    # Now try actually creating one
    response2 = app_client.post(
        "/-/create-token",
        post_data,
        csrftoken_from=True,
        cookies={"ds_actor": ds_actor},
    )
    assert response2.status == 200
    if errors:
        for error in errors:
            assert '<p class="message-error">{}</p>'.format(error) in response2.text
    else:
        # Extract token from page
        token = response2.text.split('value="dstok_')[1].split('"')[0]
        details = app_client.ds.unsign(token, "token")
        assert details.keys() == {"a", "t", "d"} or details.keys() == {"a", "t"}
        assert details["a"] == "test"
        if expected_duration is None:
            assert "d" not in details
        else:
            assert details["d"] == expected_duration
        # And test that token
        response3 = app_client.get(
            "/-/actor.json",
            headers={"Authorization": "Bearer {}".format("dstok_{}".format(token))},
        )
        assert response3.status == 200
        assert response3.json["actor"]["id"] == "test"


def test_auth_create_token_not_allowed_for_tokens(app_client):
    ds_tok = app_client.ds.sign({"a": "test", "token": "dstok"}, "token")
    response = app_client.get(
        "/-/create-token",
        headers={"Authorization": "Bearer dstok_{}".format(ds_tok)},
    )
    assert response.status == 403


def test_auth_create_token_not_allowed_if_allow_signed_tokens_off(app_client):
    app_client.ds._settings["allow_signed_tokens"] = False
    try:
        ds_actor = app_client.actor_cookie({"id": "test"})
        response = app_client.get("/-/create-token", cookies={"ds_actor": ds_actor})
        assert response.status == 403
    finally:
        app_client.ds._settings["allow_signed_tokens"] = True


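# Token payloads use short keys: "a" is the actor id, "t" the creation timestamp
# and "d" an optional duration in seconds; a token is expired once t + d is past.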
@pytest.mark.parametrize(
    "scenario,should_work",
    (
        ("allow_signed_tokens_off", False),
        ("no_token", False),
        ("no_timestamp", False),
        ("invalid_token", False),
        ("expired_token", False),
        ("valid_unlimited_token", True),
        ("valid_expiring_token", True),
    ),
)
def test_auth_with_dstok_token(app_client, scenario, should_work):
    token = None
    _time = int(time.time())
    if scenario in ("valid_unlimited_token", "allow_signed_tokens_off"):
        token = app_client.ds.sign({"a": "test", "t": _time}, "token")
    elif scenario == "valid_expiring_token":
        token = app_client.ds.sign({"a": "test", "t": _time - 50, "d": 1000}, "token")
    elif scenario == "expired_token":
        token = app_client.ds.sign({"a": "test", "t": _time - 2000, "d": 1000}, "token")
    elif scenario == "no_timestamp":
        token = app_client.ds.sign({"a": "test"}, "token")
    elif scenario == "invalid_token":
        token = "invalid"
    if token:
        token = "dstok_{}".format(token)
    if scenario == "allow_signed_tokens_off":
        app_client.ds._settings["allow_signed_tokens"] = False
    headers = {}
    if token:
        headers["Authorization"] = "Bearer {}".format(token)
    response = app_client.get("/-/actor.json", headers=headers)
    try:
        if should_work:
            assert response.json.keys() == {"actor"}
            actor = response.json["actor"]
            expected_keys = {"id", "token"}
            if scenario != "valid_unlimited_token":
                expected_keys.add("token_expires")
            assert actor.keys() == expected_keys
            assert actor["id"] == "test"
            assert actor["token"] == "dstok"
            if scenario != "valid_unlimited_token":
                assert isinstance(actor["token_expires"], int)
        else:
            assert response.json == {"actor": None}
    finally:
        app_client.ds._settings["allow_signed_tokens"] = True


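# The CLI counterpart to /-/create-token: it signs a token using --secret directly,
# so the result is accepted by an instance running with that same secret.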
@pytest.mark.parametrize("expires", (None, 1000, -1000))
|
||||
def test_cli_create_token(app_client, expires):
|
||||
secret = app_client.ds._secret
|
||||
runner = CliRunner(mix_stderr=False)
|
||||
args = ["create-token", "--secret", secret, "test"]
|
||||
if expires:
|
||||
args += ["--expires-after", str(expires)]
|
||||
result = runner.invoke(cli, args)
|
||||
assert result.exit_code == 0
|
||||
token = result.output.strip()
|
||||
assert token.startswith("dstok_")
|
||||
details = app_client.ds.unsign(token[len("dstok_") :], "token")
|
||||
expected_keys = {"a", "token", "t"}
|
||||
if expires:
|
||||
expected_keys.add("d")
|
||||
assert details.keys() == expected_keys
|
||||
assert details["a"] == "test"
|
||||
response = app_client.get(
|
||||
"/-/actor.json", headers={"Authorization": "Bearer {}".format(token)}
|
||||
)
|
||||
if expires is None or expires > 0:
|
||||
expected_actor = {
|
||||
"id": "test",
|
||||
"token": "dstok",
|
||||
}
|
||||
if expires and expires > 0:
|
||||
expected_actor["token_expires"] = details["t"] + expires
|
||||
assert response.json == {"actor": expected_actor}
|
||||
else:
|
||||
expected_actor = None
|
||||
assert response.json == {"actor": expected_actor}
|
||||
|
|
|
|||
|
|
@@ -62,7 +62,7 @@ def documented_views():
            if first_word.endswith("View"):
                view_labels.add(first_word)
    # We deliberately don't document these:
    view_labels.update(("PatternPortfolioView", "AuthTokenView"))
    view_labels.update(("PatternPortfolioView", "AuthTokenView", "ApiExplorerView"))
    return view_labels

@@ -78,6 +78,19 @@ async def test_table_exists(db, tables, exists):
    assert exists == actual


@pytest.mark.parametrize(
    "view,expected",
    (
        ("not_a_view", False),
        ("paginated_view", True),
    ),
)
@pytest.mark.asyncio
async def test_view_exists(db, view, expected):
    actual = await db.view_exists(view)
    assert actual == expected


@pytest.mark.parametrize(
    "table,expected",
    (

@@ -400,6 +413,17 @@ async def test_table_names(db):
    ]


@pytest.mark.asyncio
async def test_view_names(db):
    view_names = await db.view_names()
    assert view_names == [
        "paginated_view",
        "simple_view",
        "searchable_view",
        "searchable_view_configured_by_metadata",
    ]


@pytest.mark.asyncio
async def test_execute_write_block_true(db):
    await db.execute_write(

@@ -1,7 +1,9 @@
from datasette.app import Datasette
from .fixtures import app_client, assert_permissions_checked, make_app_client
from bs4 import BeautifulSoup as Soup
import copy
import json
import pytest_asyncio
import pytest
import re
import urllib

@@ -21,6 +23,18 @@ def padlock_client():
    yield client


@pytest_asyncio.fixture
async def perms_ds():
    ds = Datasette()
    await ds.invoke_startup()
    one = ds.add_memory_database("perms_ds_one")
    two = ds.add_memory_database("perms_ds_two")
    await one.execute_write("create table if not exists t1 (id integer primary key)")
    await one.execute_write("create table if not exists t2 (id integer primary key)")
    await two.execute_write("create table if not exists t1 (id integer primary key)")
    return ds


@pytest.mark.parametrize(
    "allow,expected_anon,expected_auth",
    [

@@ -260,6 +274,7 @@ def test_execute_sql(metadata):
    schema_json = schema_re.search(response_text).group(1)
    schema = json.loads(schema_json)
    assert set(schema["attraction_characteristic"]) == {"name", "pk"}
    assert schema["paginated_view"] == []
    assert form_fragment in response_text
    query_response = client.get("/fixtures?sql=select+1", cookies=cookies)
    assert query_response.status == 200

@@ -540,3 +555,88 @@ def test_padlocks_on_database_page(cascade_app_client):
        assert ">simple_view</a></li>" in response.text
    finally:
        cascade_app_client.ds._metadata_local = previous_metadata


DEF = "USE_DEFAULT"


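# An "_r" block restricts an actor to an allow-list of abbreviated permissions:
# "a" applies instance-wide, "d" per database and "t" per table, with actions
# shortened to e.g. "vi" (view-instance), "vd" (view-database), "vt" (view-table).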
@pytest.mark.asyncio
@pytest.mark.parametrize(
    "actor,permission,resource_1,resource_2,expected_result",
    (
        # Without restrictions the defaults apply
        ({"id": "t"}, "view-instance", None, None, DEF),
        ({"id": "t"}, "view-database", "one", None, DEF),
        ({"id": "t"}, "view-table", "one", "t1", DEF),
        # If there is an _r block, everything gets denied unless explicitly allowed
        ({"id": "t", "_r": {}}, "view-instance", None, None, False),
        ({"id": "t", "_r": {}}, "view-database", "one", None, False),
        ({"id": "t", "_r": {}}, "view-table", "one", "t1", False),
        # Explicit allowing works at the "a" for all level:
        ({"id": "t", "_r": {"a": ["vi"]}}, "view-instance", None, None, DEF),
        ({"id": "t", "_r": {"a": ["vd"]}}, "view-database", "one", None, DEF),
        ({"id": "t", "_r": {"a": ["vt"]}}, "view-table", "one", "t1", DEF),
        # But not if it's the wrong permission
        ({"id": "t", "_r": {"a": ["vd"]}}, "view-instance", None, None, False),
        ({"id": "t", "_r": {"a": ["vi"]}}, "view-database", "one", None, False),
        ({"id": "t", "_r": {"a": ["vd"]}}, "view-table", "one", "t1", False),
        # Works at the "d" for database level:
        ({"id": "t", "_r": {"d": {"one": ["vd"]}}}, "view-database", "one", None, DEF),
        (
            {"id": "t", "_r": {"d": {"one": ["vdd"]}}},
            "view-database-download",
            "one",
            None,
            DEF,
        ),
        ({"id": "t", "_r": {"d": {"one": ["es"]}}}, "execute-sql", "one", None, DEF),
        # Works at the "t" for table level:
        (
            {"id": "t", "_r": {"t": {"one": {"t1": ["vt"]}}}},
            "view-table",
            "one",
            "t1",
            DEF,
        ),
        (
            {"id": "t", "_r": {"t": {"one": {"t1": ["vt"]}}}},
            "view-table",
            "one",
            "t2",
            False,
        ),
    ),
)
async def test_actor_restricted_permissions(
    perms_ds, actor, permission, resource_1, resource_2, expected_result
):
    cookies = {"ds_actor": perms_ds.sign({"a": {"id": "root"}}, "actor")}
    csrftoken = (await perms_ds.client.get("/-/permissions", cookies=cookies)).cookies[
        "ds_csrftoken"
    ]
    cookies["ds_csrftoken"] = csrftoken
    response = await perms_ds.client.post(
        "/-/permissions",
        data={
            "actor": json.dumps(actor),
            "permission": permission,
            "resource_1": resource_1,
            "resource_2": resource_2,
            "csrftoken": csrftoken,
        },
        cookies=cookies,
    )
    expected_resource = []
    if resource_1:
        expected_resource.append(resource_1)
    if resource_2:
        expected_resource.append(resource_2)
    if len(expected_resource) == 1:
        expected_resource = expected_resource[0]
    expected = {
        "actor": actor,
        "permission": permission,
        "resource": expected_resource,
        "result": expected_result,
    }
    assert response.json() == expected

@@ -971,6 +971,7 @@ def test_hook_register_commands():
        "plugins",
        "publish",
        "uninstall",
        "create-token",
    }

    # Now install a plugin

@@ -1001,6 +1002,7 @@ def test_hook_register_commands():
        "uninstall",
        "verify",
        "unverify",
        "create-token",
    }
    pm.unregister(name="verify")
    importlib.reload(cli)