Mirror of https://github.com/simonw/datasette
Compare commits
168 commits
5639f9d943
...
a98450f9d4
Author | SHA1 | Date |
---|---|---|
Michele Stravs | a98450f9d4 | |
Simon Willison | c92f326ed1 | |
Simon Willison | feddd61789 | |
Simon Willison | 9cc6f1908f | |
Simon Willison | e088abdb46 | |
Simon Willison | 828ef9899f | |
Simon Willison | 8d456aae45 | |
Simon Willison | b8711988b9 | |
Simon Willison | 7339cc51de | |
Simon Willison | 06281a0b8e | |
Simon Willison | 909c85cd2b | |
Simon Willison | daf5ca02ca | |
Simon Willison | 7b32d5f7d8 | |
Simon Willison | 7818e8b9d1 | |
Simon Willison | a395256c8c | |
Simon Willison | 090dff542b | |
Simon Willison | c6e8a4a76c | |
Simon Willison | 4d24bf6b34 | |
Simon Willison | 5de6797d4a | |
Simon Willison | 86335dc722 | |
Simon Willison | 57c1ce0e8b | |
Simon Willison | 6ec0081f5d | |
Simon Willison | f99c2f5f8c | |
Simon Willison | c863443ea1 | |
Simon Willison | dfd4ad558b | |
Simon Willison | 434123425f | |
Jeroen Van Goey | 103b4decbd | |
dependabot[bot] | 158d5d96e9 | |
Simon Willison | 28bf3a933f | |
Simon Willison | 26300738e3 | |
Simon Willison | 27409a7892 | |
Simon Willison | 392ca2e24c | |
Simon Willison | b36a2d8f4b | |
Simon Willison | 3856a8cb24 | |
Simon Willison | 81629dbeff | |
Simon Willison | a4fa1ef3bd | |
Simon Willison | 10f9ba1a00 | |
Simon Willison | 5e0e440f2c | |
Simon Willison | e1c80efff8 | |
Simon Willison | 9906f937d9 | |
Simon Willison | 3a999a85fb | |
Simon Willison | 244f3ff83a | |
Simon Willison | 8bfa3a51c2 | |
Simon Willison | 232a30459b | |
Simon Willison | 47e29e948b | |
Simon Willison | 97de4d6362 | |
Simon Willison | b89cac3b6a | |
Simon Willison | 5d79974186 | |
Simon Willison | 398a92cf1e | |
Simon Willison | bd9ed62e5d | |
Simon Willison | dcd9ea3622 | |
Simon Willison | c62cfa6de8 | |
Simon Willison | c954795f9a | |
Simon Willison | 4e944c29e4 | |
Simon Willison | 528d89d1a3 | |
Simon Willison | b5ccc4d608 | |
Simon Willison | 574687834f | |
Simon Willison | 900d15bcb8 | |
Simon Willison | 569aacd39b | |
Simon Willison | 9989f25709 | |
Simon Willison | e0794ddd52 | |
Simon Willison | 1e31821d9f | |
Simon Willison | df8d1c055a | |
Simon Willison | d0089ba776 | |
Simon Willison | c64453a4a1 | |
Simon Willison | ad01f9d321 | |
Simon Willison | 9ac9f0152f | |
Simon Willison | 60c6692f68 | |
Simon Willison | 52a1dac5d2 | |
Simon Willison | f049103852 | |
Simon Willison | 69c6e95323 | |
Simon Willison | 5d21057cf1 | |
Simon Willison | 5a63ecc557 | |
Simon Willison | 1e901aa690 | |
Simon Willison | 85a1dfe6e0 | |
Simon Willison | efc7357554 | |
Simon Willison | 503545b203 | |
Simon Willison | 7219a56d1e | |
Simon Willison | 5ea7098e4d | |
Simon Willison | 4ea109ac4d | |
Simon Willison | 6ccef35cc9 | |
Simon Willison | be4f02335f | |
Simon Willison | d4bc2b2dfc | |
Simon Willison | 4da581d09b | |
Simon Willison | b466749e88 | |
Simon Willison | bcf7ef963f | |
Simon Willison | 2e4a03b2c4 | |
Simon Willison | bcc4f6bf1f | |
dependabot[bot] | 890615b3f2 | |
Simon Willison | 959e020297 | |
gerrymanoim | 04e8835297 | |
Forest Gregg | b8230694ff | |
Simon Willison | 5c64af6936 | |
Simon Willison | c3caf36af7 | |
Simon Willison | 7a5adb592a | |
Simon Willison | a25bf6bea7 | |
Simon Willison | 0f63cb83ed | |
Simon Willison | 7506a89be0 | |
Simon Willison | 48148e66a8 | |
Simon Willison | 2ff4d4a60a | |
Simon Willison | 0b2c6a7ebd | |
Simon Willison | 1fc76fee62 | |
Simon Willison | c7a4706bcc | |
Simon Willison | 45b88f2056 | |
Simon Willison | 872dae1e1a | |
Simon Willison | 978249beda | |
Simon Willison | 4284c74bc1 | |
Simon Willison | 89c8ca0f3f | |
Simon Willison | 067cc75dfa | |
Cameron Yick | 452a587e23 | |
Simon Willison | 4b534b89a5 | |
Simon Willison | 11f7fd38a4 | |
Simon Willison | a4b401f470 | |
Alex Garcia | 3d6d1e3050 | |
Alex Garcia | 35deaabcb1 | |
Simon Willison | 4e1188f60f | |
Simon Willison | 85a41987c7 | |
Simon Willison | d51e63d3bb | |
Simon Willison | 836b1587f0 | |
Simon Willison | e4f868801a | |
Simon Willison | f130c7c0a8 | |
Simon Willison | 2da1a6acec | |
Simon Willison | b7cf0200e2 | |
Simon Willison | 80a9cd9620 | |
Simon Willison | b0d0a0e5de | |
Simon Willison | 947520c1fe | |
Simon Willison | 10bc805473 | |
dependabot[bot] | 6763572948 | |
Simon Willison | b0e5d8afa3 | |
Simon Willison | 6ed7908580 | |
Simon Willison | f56e043747 | |
Simon Willison | 852f501485 | |
Simon Willison | 16f0b6d822 | |
Alex Garcia | b2ec8717c3 | |
Simon Willison | a4c96d01b2 | |
Simon Willison | b645174271 | |
Simon Willison | c26370485a | |
Simon Willison | ab040470e2 | |
Simon Willison | dbfad6d220 | |
Simon Willison | 2200abfa17 | |
Simon Willison | fbcb103c0c | |
dependabot[bot] | e4abae3fd7 | |
Simon Willison | e86eaaa4f3 | |
Simon Willison | 05707aa16b | |
Simon Willison | 31d5c4ec05 | |
Simon Willison | fd083e37ec | |
Simon Willison | 98ffad9aed | |
Simon Willison | 9cead33fb9 | |
Simon Willison | 4c3ef03311 | |
Simon Willison | 2caa53a52a | |
Simon Willison | 6bfe104d47 | |
Simon Willison | 30b28c8367 | |
Simon Willison | bb12229794 | |
Simon Willison | 50da908213 | |
Simon Willison | a1f3d75a52 | |
Alex Garcia | 92b8bf38c0 | |
dependabot[bot] | d28f12092d | |
Simon Willison | 2e2825869f | |
Simon Willison | d8351b08ed | |
Simon Willison | d9aad1fd04 | |
Simon Willison | 527cec66b0 | |
Simon Willison | bdf59eb7db | |
Simon Willison | 64fd1d788e | |
Simon Willison | 2ce7872e3b | |
Alex Garcia | 17ec309e14 | |
Simon Willison | 01e0558825 | |
Simon Willison | 943df09dcc | |
Simon Willison | 4535568f2c |

@@ -17,6 +17,7 @@ jobs:
       uses: actions/checkout@v3
     - name: Set up Python
       uses: actions/setup-python@v4
+      # gcloud commmand breaks on higher Python versions, so stick with 3.9:
       with:
         python-version: "3.9"
     - uses: actions/cache@v3
@@ -37,8 +38,14 @@ jobs:
       run: |
         pytest -n auto -m "not serial"
         pytest -m "serial"
-    - name: Build fixtures.db
-      run: python tests/fixtures.py fixtures.db fixtures.json plugins --extra-db-filename extra_database.db
+    - name: Build fixtures.db and other files needed to deploy the demo
+      run: |-
+        python tests/fixtures.py \
+          fixtures.db \
+          fixtures-config.json \
+          fixtures-metadata.json \
+          plugins \
+          --extra-db-filename extra_database.db
     - name: Build docs.db
       if: ${{ github.ref == 'refs/heads/main' }}
       run: |-
@@ -87,13 +94,13 @@ jobs:
          }
          return queries
          EOF
-    - name: Make some modifications to metadata.json
-      run: |
-        cat fixtures.json | \
-          jq '.databases |= . + {"ephemeral": {"allow": {"id": "*"}}}' | \
-          jq '.plugins |= . + {"datasette-ephemeral-tables": {"table_ttl": 900}}' \
-          > metadata.json
-        cat metadata.json
+    # - name: Make some modifications to metadata.json
+    #   run: |
+    #     cat fixtures.json | \
+    #       jq '.databases |= . + {"ephemeral": {"allow": {"id": "*"}}}' | \
+    #       jq '.plugins |= . + {"datasette-ephemeral-tables": {"table_ttl": 900}}' \
+    #       > metadata.json
+    #     cat metadata.json
     - name: Set up Cloud Run
       uses: google-github-actions/setup-gcloud@v0
       with:
@@ -111,7 +118,7 @@ jobs:
         # Replace 1.0 with one-dot-zero in SUFFIX
         export SUFFIX=${SUFFIX//1.0/one-dot-zero}
         datasette publish cloudrun fixtures.db fixtures2.db extra_database.db \
-          -m metadata.json \
+          -m fixtures-metadata.json \
           --plugins-dir=plugins \
           --branch=$GITHUB_SHA \
           --version-note=$GITHUB_SHA \

@@ -12,20 +12,15 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ["3.8", "3.9", "3.10", "3.11"]
+        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
     steps:
-    - uses: actions/checkout@v3
+    - uses: actions/checkout@v4
     - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v4
+      uses: actions/setup-python@v5
       with:
         python-version: ${{ matrix.python-version }}
-    - uses: actions/cache@v3
-      name: Configure pip caching
-      with:
-        path: ~/.cache/pip
-        key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
-        restore-keys: |
-          ${{ runner.os }}-pip-
+        cache: pip
+        cache-dependency-path: setup.py
     - name: Install dependencies
       run: |
         pip install -e '.[test]'
@@ -36,47 +31,38 @@ jobs:
   deploy:
     runs-on: ubuntu-latest
     needs: [test]
+    environment: release
+    permissions:
+      id-token: write
     steps:
-    - uses: actions/checkout@v3
+    - uses: actions/checkout@v4
     - name: Set up Python
-      uses: actions/setup-python@v4
+      uses: actions/setup-python@v5
       with:
-        python-version: '3.11'
-    - uses: actions/cache@v3
-      name: Configure pip caching
-      with:
-        path: ~/.cache/pip
-        key: ${{ runner.os }}-publish-pip-${{ hashFiles('**/setup.py') }}
-        restore-keys: |
-          ${{ runner.os }}-publish-pip-
+        python-version: '3.12'
+        cache: pip
+        cache-dependency-path: setup.py
     - name: Install dependencies
       run: |
-        pip install setuptools wheel twine
-    - name: Publish
-      env:
-        TWINE_USERNAME: __token__
-        TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
+        pip install setuptools wheel build
+    - name: Build
       run: |
-        python setup.py sdist bdist_wheel
-        twine upload dist/*
+        python -m build
+    - name: Publish
+      uses: pypa/gh-action-pypi-publish@release/v1

   deploy_static_docs:
     runs-on: ubuntu-latest
     needs: [deploy]
     if: "!github.event.release.prerelease"
     steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v4
    - name: Set up Python
-      uses: actions/setup-python@v2
+      uses: actions/setup-python@v5
       with:
         python-version: '3.9'
-    - uses: actions/cache@v2
-      name: Configure pip caching
-      with:
-        path: ~/.cache/pip
-        key: ${{ runner.os }}-publish-pip-${{ hashFiles('**/setup.py') }}
-        restore-keys: |
-          ${{ runner.os }}-publish-pip-
+        cache: pip
+        cache-dependency-path: setup.py
     - name: Install dependencies
       run: |
         python -m pip install -e .[docs]
@@ -105,7 +91,7 @@ jobs:
     needs: [deploy]
     if: "!github.event.release.prerelease"
     steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v4
     - name: Build and push to Docker Hub
       env:
         DOCKER_USER: ${{ secrets.DOCKER_USER }}

@@ -9,18 +9,13 @@ jobs:
   spellcheck:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@v2
-    - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v2
+    - uses: actions/checkout@v4
+    - name: Set up Python
+      uses: actions/setup-python@v4
       with:
-        python-version: 3.9
-    - uses: actions/cache@v2
-      name: Configure pip caching
-      with:
-        path: ~/.cache/pip
-        key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
-        restore-keys: |
-          ${{ runner.os }}-pip-
+        python-version: '3.11'
+        cache: 'pip'
+        cache-dependency-path: '**/setup.py'
     - name: Install dependencies
       run: |
         pip install -e '.[docs]'
@@ -29,3 +24,4 @@ jobs:
         codespell README.md --ignore-words docs/codespell-ignore-words.txt
         codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt
         codespell datasette -S datasette/static --ignore-words docs/codespell-ignore-words.txt
+        codespell tests --ignore-words docs/codespell-ignore-words.txt

@@ -15,18 +15,13 @@ jobs:
     runs-on: ubuntu-latest
     steps:
     - name: Check out datasette
-      uses: actions/checkout@v2
+      uses: actions/checkout@v4
     - name: Set up Python
-      uses: actions/setup-python@v2
+      uses: actions/setup-python@v5
       with:
-        python-version: 3.9
-    - uses: actions/cache@v2
-      name: Configure pip caching
-      with:
-        path: ~/.cache/pip
-        key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
-        restore-keys: |
-          ${{ runner.os }}-pip-
+        python-version: '3.12'
+        cache: 'pip'
+        cache-dependency-path: '**/setup.py'
     - name: Install Python dependencies
       run: |
         python -m pip install --upgrade pip

@@ -10,26 +10,22 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ["3.8", "3.9", "3.10", "3.11"]
+        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
     steps:
-    - uses: actions/checkout@v3
+    - uses: actions/checkout@v4
     - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v4
+      uses: actions/setup-python@v5
       with:
         python-version: ${{ matrix.python-version }}
-    - uses: actions/cache@v3
-      name: Configure pip caching
-      with:
-        path: ~/.cache/pip
-        key: ${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}
-        restore-keys: |
-          ${{ runner.os }}-pip-
+        allow-prereleases: true
+        cache: pip
+        cache-dependency-path: setup.py
     - name: Build extension for --load-extension test
       run: |-
        (cd tests && gcc ext.c -fPIC -shared -o ext.so)
     - name: Install dependencies
       run: |
-        pip install -e '.[test,docs]'
+        pip install -e '.[test]'
         pip freeze
     - name: Run tests
       run: |
@@ -37,10 +33,20 @@ jobs:
         pytest -m "serial"
         # And the test that exceeds a localhost HTTPS server
         tests/test_datasette_https_server.sh
+    - name: Install docs dependencies on Python 3.9+
+      if: matrix.python-version != '3.8'
+      run: |
+        pip install -e '.[docs]'
     - name: Check if cog needs to be run
+      if: matrix.python-version != '3.8'
       run: |
         cog --check docs/*.rst
     - name: Check if blacken-docs needs to be run
+      if: matrix.python-version != '3.8'
       run: |
         # This fails on syntax errors, or a diff was applied
         blacken-docs -l 60 docs/*.rst
+    - name: Test DATASETTE_LOAD_PLUGINS
+      run: |
+        pip install datasette-init datasette-json-html
+        tests/test-datasette-load-plugins.sh

Justfile (1 change)

@@ -15,6 +15,7 @@ export DATASETTE_SECRET := "not_a_secret"
   pipenv run codespell README.md --ignore-words docs/codespell-ignore-words.txt
   pipenv run codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt
   pipenv run codespell datasette -S datasette/static --ignore-words docs/codespell-ignore-words.txt
+  pipenv run tests --ignore-words docs/codespell-ignore-words.txt

 # Run linters: black, flake8, mypy, cog
 @lint: codespell

@@ -1,13 +1,13 @@
 <img src="https://datasette.io/static/datasette-logo.svg" alt="Datasette">

 [![PyPI](https://img.shields.io/pypi/v/datasette.svg)](https://pypi.org/project/datasette/)
-[![Changelog](https://img.shields.io/github/v/release/simonw/datasette?label=changelog)](https://docs.datasette.io/en/stable/changelog.html)
+[![Changelog](https://img.shields.io/github/v/release/simonw/datasette?label=changelog)](https://docs.datasette.io/en/latest/changelog.html)
 [![Python 3.x](https://img.shields.io/pypi/pyversions/datasette.svg?logo=python&logoColor=white)](https://pypi.org/project/datasette/)
 [![Tests](https://github.com/simonw/datasette/workflows/Test/badge.svg)](https://github.com/simonw/datasette/actions?query=workflow%3ATest)
 [![Documentation Status](https://readthedocs.org/projects/datasette/badge/?version=latest)](https://docs.datasette.io/en/latest/?badge=latest)
 [![License](https://img.shields.io/badge/license-Apache%202.0-blue.svg)](https://github.com/simonw/datasette/blob/main/LICENSE)
 [![docker: datasette](https://img.shields.io/badge/docker-datasette-blue)](https://hub.docker.com/r/datasetteproject/datasette)
-[![discord](https://img.shields.io/discord/823971286308356157?label=discord)](https://discord.gg/ktd74dm5mw)
+[![discord](https://img.shields.io/discord/823971286308356157?label=discord)](https://datasette.io/discord)

 *An open source multi-tool for exploring and publishing data*

@@ -22,7 +22,7 @@ Datasette is aimed at data journalists, museum curators, archivists, local gover
 * Comprehensive documentation: https://docs.datasette.io/
 * Examples: https://datasette.io/examples
 * Live demo of current `main` branch: https://latest.datasette.io/
-* Questions, feedback or want to talk about the project? Join our [Discord](https://discord.gg/ktd74dm5mw)
+* Questions, feedback or want to talk about the project? Join our [Discord](https://datasette.io/discord)

 Want to stay up-to-date with the project? Subscribe to the [Datasette newsletter](https://datasette.substack.com/) for tips, tricks and news on what's new in the Datasette ecosystem.

@@ -1,5 +1,6 @@
 from datasette.permissions import Permission  # noqa
 from datasette.version import __version_info__, __version__  # noqa
+from datasette.events import Event  # noqa
 from datasette.utils.asgi import Forbidden, NotFound, Request, Response  # noqa
 from datasette.utils import actor_matches_allow  # noqa
 from datasette.views import Context  # noqa

datasette/app.py (280 changes)

@@ -8,11 +8,11 @@ import functools
 import glob
 import hashlib
 import httpx
+import importlib.metadata
 import inspect
 from itsdangerous import BadSignature
 import json
 import os
-import pkg_resources
 import re
 import secrets
 import sys
@@ -34,6 +34,7 @@ from jinja2 import (
 from jinja2.environment import Template
 from jinja2.exceptions import TemplateNotFound

+from .events import Event
 from .views import Context
 from .views.base import ureg
 from .views.database import database_download, DatabaseView, TableCreateView
@@ -73,12 +74,15 @@ from .utils import (
     find_spatialite,
     format_bytes,
     module_from_path,
+    move_plugins_and_allow,
+    move_table_config,
     parse_metadata,
     resolve_env_secrets,
     resolve_routes,
     tilde_decode,
     to_css_class,
     urlsafe_components,
+    redact_keys,
     row_sql_params_pks,
 )
 from .utils.asgi import (
@@ -242,6 +246,7 @@ class Datasette:
         cache_headers=True,
         cors=False,
         inspect_data=None,
+        config=None,
         metadata=None,
         sqlite_extensions=None,
         template_dir=None,
@@ -255,6 +260,7 @@ class Datasette:
         pdb=False,
         crossdb=False,
         nolock=False,
+        internal=None,
     ):
         self._startup_invoked = False
         assert config_dir is None or isinstance(
@@ -303,19 +309,21 @@ class Datasette:
             self.add_database(
                 Database(self, is_mutable=False, is_memory=True), name="_memory"
             )
-        # memory_name is a random string so that each Datasette instance gets its own
-        # unique in-memory named database - otherwise unit tests can fail with weird
-        # errors when different instances accidentally share an in-memory database
-        self.add_database(
-            Database(self, memory_name=secrets.token_hex()), name="_internal"
-        )
-        self.internal_db_created = False
         for file in self.files:
             self.add_database(
                 Database(self, file, is_mutable=file not in self.immutables)
             )

+        self.internal_db_created = False
+        if internal is None:
+            self._internal_database = Database(self, memory_name=secrets.token_hex())
+        else:
+            self._internal_database = Database(self, path=internal, mode="rwc")
+        self._internal_database.name = "__INTERNAL__"
+
         self.cache_headers = cache_headers
         self.cors = cors
+        config_files = []
         metadata_files = []
         if config_dir:
             metadata_files = [
@@ -323,9 +331,26 @@ class Datasette:
                 for filename in ("metadata.json", "metadata.yaml", "metadata.yml")
                 if (config_dir / filename).exists()
             ]
+            config_files = [
+                config_dir / filename
+                for filename in ("datasette.json", "datasette.yaml", "datasette.yml")
+                if (config_dir / filename).exists()
+            ]
         if config_dir and metadata_files and not metadata:
             with metadata_files[0].open() as fp:
                 metadata = parse_metadata(fp.read())

+        if config_dir and config_files and not config:
+            with config_files[0].open() as fp:
+                config = parse_metadata(fp.read())
+
+        # Move any "plugins" and "allow" settings from metadata to config - updates them in place
+        metadata = metadata or {}
+        config = config or {}
+        metadata, config = move_plugins_and_allow(metadata, config)
+        # Now migrate any known table configuration settings over as well
+        metadata, config = move_table_config(metadata, config)
+
         self._metadata_local = metadata or {}
         self.sqlite_extensions = []
         for extension in sqlite_extensions or []:
@@ -344,17 +369,19 @@ class Datasette:
         if config_dir and (config_dir / "static").is_dir() and not static_mounts:
             static_mounts = [("static", str((config_dir / "static").resolve()))]
         self.static_mounts = static_mounts or []
         if config_dir and (config_dir / "config.json").exists():
             raise StartupError("config.json should be renamed to settings.json")
         if config_dir and (config_dir / "settings.json").exists() and not settings:
             settings = json.loads((config_dir / "settings.json").read_text())
-            # Validate those settings
-            for key in settings:
-                if key not in DEFAULT_SETTINGS:
-                    raise StartupError(
-                        "Invalid setting '{}' in settings.json".format(key)
-                    )
-        self._settings = dict(DEFAULT_SETTINGS, **(settings or {}))
+        if config_dir and (config_dir / "datasette.json").exists() and not config:
+            config = json.loads((config_dir / "datasette.json").read_text())
+
+        config = config or {}
+        config_settings = config.get("settings") or {}
+
+        # validate "settings" keys in datasette.json
+        for key in config_settings:
+            if key not in DEFAULT_SETTINGS:
+                raise StartupError("Invalid setting '{}' in datasette.json".format(key))
+        self.config = config
+        # CLI settings should overwrite datasette.json settings
+        self._settings = dict(DEFAULT_SETTINGS, **(config_settings), **(settings or {}))
         self.renderers = {}  # File extension -> (renderer, can_render) functions
         self.version_note = version_note
         if self.setting("num_sql_threads") == 0:
@@ -400,21 +427,45 @@ class Datasette:
                 ),
             ]
         )
-        self.jinja_env = Environment(
+        environment = Environment(
             loader=template_loader,
             autoescape=True,
             enable_async=True,
             # undefined=StrictUndefined,
         )
-        self.jinja_env.filters["escape_css_string"] = escape_css_string
-        self.jinja_env.filters["quote_plus"] = urllib.parse.quote_plus
-        self.jinja_env.filters["escape_sqlite"] = escape_sqlite
-        self.jinja_env.filters["to_css_class"] = to_css_class
+        environment.filters["escape_css_string"] = escape_css_string
+        environment.filters["quote_plus"] = urllib.parse.quote_plus
+        self._jinja_env = environment
+        environment.filters["escape_sqlite"] = escape_sqlite
+        environment.filters["to_css_class"] = to_css_class
         self._register_renderers()
         self._permission_checks = collections.deque(maxlen=200)
         self._root_token = secrets.token_hex(32)
         self.client = DatasetteClient(self)

+    def get_jinja_environment(self, request: Request = None) -> Environment:
+        environment = self._jinja_env
+        if request:
+            for environment in pm.hook.jinja2_environment_from_request(
+                datasette=self, request=request, env=environment
+            ):
+                pass
+        return environment
+
+    def get_permission(self, name_or_abbr: str) -> "Permission":
+        """
+        Returns a Permission object for the given name or abbreviation. Raises KeyError if not found.
+        """
+        if name_or_abbr in self.permissions:
+            return self.permissions[name_or_abbr]
+        # Try abbreviation
+        for permission in self.permissions.values():
+            if permission.abbr == name_or_abbr:
+                return permission
+        raise KeyError(
+            "No permission found with name or abbreviation {}".format(name_or_abbr)
+        )
+
     async def refresh_schemas(self):
         if self._refresh_schemas_lock.locked():
             return
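The new get_jinja_environment() method lets plugins supply a per-request template environment through the jinja2_environment_from_request hook it iterates over. A minimal sketch of a plugin using that hook; the hostname check and the extra filter are invented for illustration:

```python
from datasette import hookimpl


@hookimpl
def jinja2_environment_from_request(datasette, request, env):
    # Overlay the default environment instead of mutating it, so other
    # requests keep the unmodified filters and templates
    if request is not None and request.host == "data.example.com":  # hypothetical host
        overlay = env.overlay()
        overlay.filters["shout"] = lambda value: str(value).upper()  # hypothetical filter
        return overlay
    return env
```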
@@ -422,15 +473,14 @@ class Datasette:
             await self._refresh_schemas()

     async def _refresh_schemas(self):
-        internal_db = self.databases["_internal"]
+        internal_db = self.get_internal_database()
         if not self.internal_db_created:
             await init_internal_db(internal_db)
             self.internal_db_created = True
-
         current_schema_versions = {
             row["database_name"]: row["schema_version"]
             for row in await internal_db.execute(
-                "select database_name, schema_version from databases"
+                "select database_name, schema_version from catalog_databases"
             )
         }
         for database_name, db in self.databases.items():
@@ -445,7 +495,7 @@ class Datasette:
                 values = [database_name, db.is_memory, schema_version]
                 await internal_db.execute_write(
                     """
-                    INSERT OR REPLACE INTO databases (database_name, path, is_memory, schema_version)
+                    INSERT OR REPLACE INTO catalog_databases (database_name, path, is_memory, schema_version)
                     VALUES {}
                 """.format(
                         placeholders
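Note the rename here: the internal schema tables gain a catalog_ prefix (databases becomes catalog_databases), and callers reach the internal database through the new get_internal_database() accessor rather than the "_internal" entry in self.databases. A small sketch of reading that catalog, assuming a started Datasette instance:

```python
async def list_catalog_databases(datasette):
    # The internal database no longer appears in datasette.databases;
    # it is reached through the accessor added in this change
    internal_db = datasette.get_internal_database()
    result = await internal_db.execute(
        "select database_name, schema_version from catalog_databases"
    )
    return [dict(row) for row in result.rows]
```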
@@ -462,6 +512,14 @@ class Datasette:
         # This must be called for Datasette to be in a usable state
         if self._startup_invoked:
             return
+        # Register event classes
+        event_classes = []
+        for hook in pm.hook.register_events(datasette=self):
+            extra_classes = await await_me_maybe(hook)
+            if extra_classes:
+                event_classes.extend(extra_classes)
+        self.event_classes = tuple(event_classes)
+
         # Register permissions, but watch out for duplicate name/abbr
         names = {}
         abbrs = {}
@@ -481,7 +539,7 @@ class Datasette:
                 abbrs[p.abbr] = p
                 self.permissions[p.name] = p
         for hook in pm.hook.prepare_jinja2_environment(
-            env=self.jinja_env, datasette=self
+            env=self._jinja_env, datasette=self
         ):
             await await_me_maybe(hook)
         for hook in pm.hook.startup(datasette=self):
@@ -540,8 +598,7 @@ class Datasette:
                 raise KeyError
             return matches[0]
         if name is None:
-            # Return first database that isn't "_internal"
-            name = [key for key in self.databases.keys() if key != "_internal"][0]
+            name = [key for key in self.databases.keys()][0]
         return self.databases[name]

     def add_database(self, db, name=None, route=None):
@@ -641,17 +698,48 @@ class Datasette:
     def _metadata(self):
         return self.metadata()

+    def get_internal_database(self):
+        return self._internal_database
+
     def plugin_config(self, plugin_name, database=None, table=None, fallback=True):
         """Return config for plugin, falling back from specified database/table"""
-        plugins = self.metadata(
-            "plugins", database=database, table=table, fallback=fallback
-        )
-        if plugins is None:
-            return None
-        plugin_config = plugins.get(plugin_name)
-        # Resolve any $file and $env keys
-        plugin_config = resolve_env_secrets(plugin_config, os.environ)
-        return plugin_config
+        if database is None and table is None:
+            config = self._plugin_config_top(plugin_name)
+        else:
+            config = self._plugin_config_nested(plugin_name, database, table, fallback)
+
+        return resolve_env_secrets(config, os.environ)
+
+    def _plugin_config_top(self, plugin_name):
+        """Returns any top-level plugin configuration for the specified plugin."""
+        return ((self.config or {}).get("plugins") or {}).get(plugin_name)
+
+    def _plugin_config_nested(self, plugin_name, database, table=None, fallback=True):
+        """Returns any database or table-level plugin configuration for the specified plugin."""
+        db_config = ((self.config or {}).get("databases") or {}).get(database)
+
+        # if there's no db-level configuration, then return early, falling back to top-level if needed
+        if not db_config:
+            return self._plugin_config_top(plugin_name) if fallback else None
+
+        db_plugin_config = (db_config.get("plugins") or {}).get(plugin_name)
+
+        if table:
+            table_plugin_config = (
+                ((db_config.get("tables") or {}).get(table) or {}).get("plugins") or {}
+            ).get(plugin_name)
+
+            # fallback to db_config or top-level config, in that order, if needed
+            if table_plugin_config is None and fallback:
+                return db_plugin_config or self._plugin_config_top(plugin_name)
+
+            return table_plugin_config
+
+        # fallback to top-level if needed
+        if db_plugin_config is None and fallback:
+            self._plugin_config_top(plugin_name)
+
+        return db_plugin_config

     def app_css_hash(self):
         if not hasattr(self, "_app_css_hash"):
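plugin_config() now reads from the new configuration dictionary instead of metadata, with table-level settings falling back to database-level and then to the top-level "plugins" block. A sketch of that lookup order, using an invented plugin name and database layout:

```python
from datasette.app import Datasette

ds = Datasette(
    memory=True,
    config={
        "plugins": {"datasette-example": {"depth": "root"}},
        "databases": {
            "content": {
                "plugins": {"datasette-example": {"depth": "database"}},
                "tables": {
                    "posts": {"plugins": {"datasette-example": {"depth": "table"}}}
                },
            }
        },
    },
)

assert ds.plugin_config("datasette-example")["depth"] == "root"
assert ds.plugin_config("datasette-example", database="content")["depth"] == "database"
assert (
    ds.plugin_config("datasette-example", database="content", table="posts")["depth"]
    == "table"
)
```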
@@ -662,7 +750,9 @@ class Datasette:
         return self._app_css_hash

     async def get_canned_queries(self, database_name, actor):
-        queries = self.metadata("queries", database=database_name, fallback=False) or {}
+        queries = (
+            ((self.config or {}).get("databases") or {}).get(database_name) or {}
+        ).get("queries") or {}
         for more_queries in pm.hook.canned_queries(
             datasette=self,
             database=database_name,
@@ -788,14 +878,33 @@ class Datasette:
             )
         return crumbs

+    async def actors_from_ids(
+        self, actor_ids: Iterable[Union[str, int]]
+    ) -> Dict[Union[id, str], Dict]:
+        result = pm.hook.actors_from_ids(datasette=self, actor_ids=actor_ids)
+        if result is None:
+            # Do the default thing
+            return {actor_id: {"id": actor_id} for actor_id in actor_ids}
+        result = await await_me_maybe(result)
+        return result
+
+    async def track_event(self, event: Event):
+        assert isinstance(event, self.event_classes), "Invalid event type: {}".format(
+            type(event)
+        )
+        for hook in pm.hook.track_event(datasette=self, event=event):
+            await await_me_maybe(hook)
+
     async def permission_allowed(
-        self, actor, action, resource=None, default=DEFAULT_NOT_SET
+        self, actor, action, resource=None, *, default=DEFAULT_NOT_SET
     ):
         """Check permissions using the permissions_allowed plugin hook"""
         result = None
         # Use default from registered permission, if available
         if default is DEFAULT_NOT_SET and action in self.permissions:
             default = self.permissions[action].default
+        opinions = []
+        # Every plugin is consulted for their opinion
         for check in pm.hook.permission_allowed(
             datasette=self,
             actor=actor,
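track_event() only accepts event classes that were collected at startup through the register_events hook shown earlier. A sketch of defining and firing a custom event, following the dataclass pattern the events module uses; the event itself is invented:

```python
from dataclasses import dataclass
from datasette import hookimpl
from datasette.events import Event


@dataclass
class CommentCreatedEvent(Event):
    name = "comment-created"  # hypothetical event
    comment_id: int


@hookimpl
def register_events():
    return [CommentCreatedEvent]


# Later, inside async plugin code with a datasette instance in scope
# (assuming the Event base class accepts an actor= field):
#     await datasette.track_event(
#         CommentCreatedEvent(actor=request.actor, comment_id=42)
#     )
```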
@@ -804,9 +913,19 @@ class Datasette:
         ):
             check = await await_me_maybe(check)
             if check is not None:
-                result = check
+                opinions.append(check)
+
+        result = None
+        # If any plugin said False it's false - the veto rule
+        if any(not r for r in opinions):
+            result = False
+        elif any(r for r in opinions):
+            # Otherwise, if any plugin said True it's true
+            result = True
+
+        used_default = False
         if result is None:
             # No plugin expressed an opinion, so use the default
             result = default
+            used_default = True
         self._permission_checks.append(
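The resolution rule is now: any False from a plugin vetoes, otherwise any True grants, otherwise the registered default applies. A standalone sketch of that logic:

```python
def resolve_permission(opinions, default):
    # opinions holds only the non-None values returned by plugins
    if any(not opinion for opinion in opinions):
        return False  # a single veto wins
    if any(opinions):
        return True
    return default  # no plugin expressed an opinion


assert resolve_permission([True, False], default=True) is False
assert resolve_permission([True], default=False) is True
assert resolve_permission([], default=True) is True
```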
@@ -904,7 +1023,7 @@ class Datasette:
             log_sql_errors=log_sql_errors,
         )

-    async def expand_foreign_keys(self, database, table, column, values):
+    async def expand_foreign_keys(self, actor, database, table, column, values):
         """Returns dict mapping (column, value) -> label"""
         labeled_fks = {}
         db = self.databases[database]
@@ -918,7 +1037,20 @@ class Datasette:
             ][0]
         except IndexError:
             return {}
-        label_column = await db.label_column_for_table(fk["other_table"])
+        # Ensure user has permission to view the referenced table
+        other_table = fk["other_table"]
+        other_column = fk["other_column"]
+        visible, _ = await self.check_visibility(
+            actor,
+            permissions=[
+                ("view-table", (database, other_table)),
+                ("view-database", database),
+                "view-instance",
+            ],
+        )
+        if not visible:
+            return {}
+        label_column = await db.label_column_for_table(other_table)
         if not label_column:
             return {(fk["column"], value): str(value) for value in values}
         labeled_fks = {}
@@ -927,9 +1059,9 @@ class Datasette:
                 from {other_table}
                 where {other_column} in ({placeholders})
             """.format(
-            other_column=escape_sqlite(fk["other_column"]),
+            other_column=escape_sqlite(other_column),
             label_column=escape_sqlite(label_column),
-            other_table=escape_sqlite(fk["other_table"]),
+            other_table=escape_sqlite(other_table),
             placeholders=", ".join(["?"] * len(set(values))),
         )
         try:
@@ -964,7 +1096,6 @@ class Datasette:
                 "hash": d.hash,
             }
             for name, d in self.databases.items()
-            if name != "_internal"
         ]

     def _versions(self):
@@ -1037,9 +1168,9 @@ class Datasette:
         if using_pysqlite3:
             for package in ("pysqlite3", "pysqlite3-binary"):
                 try:
-                    info["pysqlite3"] = pkg_resources.get_distribution(package).version
+                    info["pysqlite3"] = importlib.metadata.version(package)
                     break
-                except pkg_resources.DistributionNotFound:
+                except importlib.metadata.PackageNotFoundError:
                     pass
         return info

@@ -1086,10 +1217,11 @@ class Datasette:
     def _actor(self, request):
         return {"actor": request.actor}

-    def table_metadata(self, database, table):
-        """Fetch table-specific metadata."""
+    async def table_config(self, database: str, table: str) -> dict:
+        """Return dictionary of configuration for specified table"""
         return (
-            (self.metadata("databases") or {})
+            (self.config or {})
             .get("databases", {})
             .get(database, {})
             .get("tables", {})
             .get(table, {})
@@ -1131,7 +1263,7 @@ class Datasette:
         else:
             if isinstance(templates, str):
                 templates = [templates]
-            template = self.jinja_env.select_template(templates)
+            template = self.get_jinja_environment(request).select_template(templates)
         if dataclasses.is_dataclass(context):
             context = dataclasses.asdict(context)
         body_scripts = []
@@ -1234,7 +1366,7 @@ class Datasette:
         ):
             hook = await await_me_maybe(hook)
             collected.extend(hook)
-        collected.extend(self.metadata(key) or [])
+        collected.extend((self.config or {}).get(key) or [])
         output = []
         for url_or_dict in collected:
             if isinstance(url_or_dict, dict):
@@ -1259,6 +1391,11 @@ class Datasette:
             output.append(script)
         return output

+    def _config(self):
+        return redact_keys(
+            self.config, ("secret", "key", "password", "token", "hash", "dsn")
+        )
+
     def _routes(self):
         routes = []

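_config() backs the /-/config endpoint wired up in the routing change below, masking sensitive values before they are served. A hedged sketch of the behavior, assuming redact_keys() walks the nested dictionary and replaces values whose key names match one of the given substrings:

```python
from datasette.utils import redact_keys

config = {
    "plugins": {
        "datasette-example": {  # hypothetical plugin block
            "api_token": "xyz-secret-value",
            "latitude_column": "lat",
        }
    }
}
redacted = redact_keys(config, ("secret", "key", "password", "token", "hash", "dsn"))
# "api_token" matches "token", so its value is replaced with a placeholder;
# "latitude_column" is left untouched
```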
@@ -1318,12 +1455,8 @@ class Datasette:
             r"/-/settings(\.(?P<format>json))?$",
         )
         add_route(
-            permanent_redirect("/-/settings.json"),
-            r"/-/config.json",
-        )
-        add_route(
-            permanent_redirect("/-/settings"),
-            r"/-/config",
+            JsonDataView.as_view(self, "config.json", lambda: self._config()),
+            r"/-/config(\.(?P<format>json))?$",
         )
         add_route(
             JsonDataView.as_view(self, "threads.json", self._threads),
@@ -1481,16 +1614,6 @@ class DatasetteRouter:
     def __init__(self, datasette, routes):
         self.ds = datasette
         self.routes = routes or []
-        # Build a list of pages/blah/{name}.html matching expressions
-        pattern_templates = [
-            filepath
-            for filepath in self.ds.jinja_env.list_templates()
-            if "{" in filepath and filepath.startswith("pages/")
-        ]
-        self.page_routes = [
-            (route_pattern_from_filepath(filepath[len("pages/") :]), filepath)
-            for filepath in pattern_templates
-        ]

     async def __call__(self, scope, receive, send):
         # Because we care about "foo/bar" v.s. "foo%2Fbar" we decode raw_path ourselves
@@ -1590,13 +1713,24 @@ class DatasetteRouter:
         route_path = request.scope.get("route_path", request.scope["path"])
         # Jinja requires template names to use "/" even on Windows
         template_name = "pages" + route_path + ".html"
+        # Build a list of pages/blah/{name}.html matching expressions
+        environment = self.ds.get_jinja_environment(request)
+        pattern_templates = [
+            filepath
+            for filepath in environment.list_templates()
+            if "{" in filepath and filepath.startswith("pages/")
+        ]
+        page_routes = [
+            (route_pattern_from_filepath(filepath[len("pages/") :]), filepath)
+            for filepath in pattern_templates
+        ]
         try:
-            template = self.ds.jinja_env.select_template([template_name])
+            template = environment.select_template([template_name])
         except TemplateNotFound:
             template = None
         if template is None:
             # Try for a pages/blah/{name}.html template match
-            for regex, wildcard_template in self.page_routes:
+            for regex, wildcard_template in page_routes:
                 match = regex.match(route_path)
                 if match is not None:
                     context.update(match.groupdict())

datasette/cli.py (142 changes)
@@ -15,7 +15,6 @@ import sys
 import textwrap
 import webbrowser
 from .app import (
     OBSOLETE_SETTINGS,
     Datasette,
     DEFAULT_SETTINGS,
     SETTINGS,
@@ -31,6 +30,7 @@ from .utils import (
     ConnectionProblem,
     SpatialiteConnectionProblem,
     initial_path_for_datasette,
+    pairs_to_nested_config,
     temporary_docker_directory,
     value_as_boolean,
     SpatialiteNotFound,
@@ -50,81 +50,33 @@ except ImportError:
     pass


-class Config(click.ParamType):
-    # This will be removed in Datasette 1.0 in favour of class Setting
-    name = "config"
-
-    def convert(self, config, param, ctx):
-        if ":" not in config:
-            self.fail(f'"{config}" should be name:value', param, ctx)
-            return
-        name, value = config.split(":", 1)
-        if name not in DEFAULT_SETTINGS:
-            msg = (
-                OBSOLETE_SETTINGS.get(name)
-                or f"{name} is not a valid option (--help-settings to see all)"
-            )
-            self.fail(
-                msg,
-                param,
-                ctx,
-            )
-            return
-        # Type checking
-        default = DEFAULT_SETTINGS[name]
-        if isinstance(default, bool):
-            try:
-                return name, value_as_boolean(value)
-            except ValueAsBooleanError:
-                self.fail(f'"{name}" should be on/off/true/false/1/0', param, ctx)
-                return
-        elif isinstance(default, int):
-            if not value.isdigit():
-                self.fail(f'"{name}" should be an integer', param, ctx)
-                return
-            return name, int(value)
-        elif isinstance(default, str):
-            return name, value
-        else:
-            # Should never happen:
-            self.fail("Invalid option")
-
-
 class Setting(CompositeParamType):
     name = "setting"
     arity = 2

     def convert(self, config, param, ctx):
         name, value = config
-        if name not in DEFAULT_SETTINGS:
-            msg = (
-                OBSOLETE_SETTINGS.get(name)
-                or f"{name} is not a valid option (--help-settings to see all)"
-            )
-            self.fail(
-                msg,
-                param,
-                ctx,
-            )
-            return
-        # Type checking
-        default = DEFAULT_SETTINGS[name]
-        if isinstance(default, bool):
-            try:
-                return name, value_as_boolean(value)
-            except ValueAsBooleanError:
-                self.fail(f'"{name}" should be on/off/true/false/1/0', param, ctx)
-                return
-        elif isinstance(default, int):
-            if not value.isdigit():
-                self.fail(f'"{name}" should be an integer', param, ctx)
-                return
-            return name, int(value)
-        elif isinstance(default, str):
-            return name, value
-        else:
-            # Should never happen:
-            self.fail("Invalid option")
+        if name in DEFAULT_SETTINGS:
+            # For backwards compatibility with how this worked prior to
+            # Datasette 1.0, we turn bare setting names into setting.name
+            # Type checking for those older settings
+            default = DEFAULT_SETTINGS[name]
+            name = "settings.{}".format(name)
+            if isinstance(default, bool):
+                try:
+                    return name, "true" if value_as_boolean(value) else "false"
+                except ValueAsBooleanError:
+                    self.fail(f'"{name}" should be on/off/true/false/1/0', param, ctx)
+            elif isinstance(default, int):
+                if not value.isdigit():
+                    self.fail(f'"{name}" should be an integer', param, ctx)
+                return name, value
+            elif isinstance(default, str):
+                return name, value
+            else:
+                # Should never happen:
+                self.fail("Invalid option")
+        return name, value


 def sqlite_extensions(fn):
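Setting.convert() now emits (dotted-path, value) string pairs instead of coerced values; serve() later feeds them to pairs_to_nested_config (imported above) to build the nested configuration. A sketch of the intended shape, assuming that helper splits keys on dots:

```python
from datasette.utils import pairs_to_nested_config

pairs = [
    # "default_page_size 50" arrives as this pair after Setting.convert()
    # rewrites the bare setting name:
    ("settings.default_page_size", "50"),
    ("plugins.datasette-example.latitude_column", "lat"),  # hypothetical plugin
]
config = pairs_to_nested_config(pairs)
# Expected nesting (value coercion is handled by the helper):
# {"settings": {"default_page_size": ...},
#  "plugins": {"datasette-example": {"latitude_column": "lat"}}}
```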
@@ -195,9 +147,6 @@ async def inspect_(files, sqlite_extensions):
     app = Datasette([], immutables=files, sqlite_extensions=sqlite_extensions)
     data = {}
     for name, database in app.databases.items():
-        if name == "_internal":
-            # Don't include the in-memory _internal database
-            continue
         counts = await database.table_counts(limit=3600 * 1000)
         data[name] = {
             "hash": database.hash,
@@ -455,16 +404,17 @@ def uninstall(packages, yes):
 )
 @click.option("--memory", is_flag=True, help="Make /_memory database available")
 @click.option(
     "-c",
     "--config",
-    type=Config(),
-    help="Deprecated: set config option using configname:value. Use --setting instead.",
-    multiple=True,
+    type=click.File(mode="r"),
+    help="Path to JSON/YAML Datasette configuration file",
 )
 @click.option(
     "-s",
     "--setting",
     "settings",
     type=Setting(),
-    help="Setting, see docs.datasette.io/en/stable/settings.html",
+    help="nested.key, value setting to use in Datasette configuration",
     multiple=True,
 )
 @click.option(
@@ -485,6 +435,10 @@ def uninstall(packages, yes):
     "--token",
     help="API token to send with --get requests",
 )
+@click.option(
+    "--actor",
+    help="Actor to use for --get requests (JSON string)",
+)
 @click.option("--version-note", help="Additional note to show on /-/versions")
 @click.option("--help-settings", is_flag=True, help="Show available settings")
 @click.option("--pdb", is_flag=True, help="Launch debugger on any errors")
@@ -518,6 +472,11 @@ def uninstall(packages, yes):
     "--ssl-certfile",
     help="SSL certificate file",
 )
+@click.option(
+    "--internal",
+    type=click.Path(),
+    help="Path to a persistent Datasette internal SQLite database",
+)
 def serve(
     files,
     immutable,
@@ -539,6 +498,7 @@ def serve(
     root,
     get,
     token,
+    actor,
     version_note,
     help_settings,
     pdb,
@@ -548,6 +508,7 @@ def serve(
     nolock,
     ssl_keyfile,
     ssl_certfile,
+    internal,
     return_instance=False,
 ):
     """Serve up specified SQLite database files with a web UI"""
@@ -568,6 +529,8 @@ def serve(
         reloader = hupper.start_reloader("datasette.cli.serve")
         if immutable:
             reloader.watch_files(immutable)
+        if config:
+            reloader.watch_files([config.name])
         if metadata:
             reloader.watch_files([metadata.name])

@@ -580,32 +543,36 @@ def serve(
     if metadata:
         metadata_data = parse_metadata(metadata.read())

-    combined_settings = {}
+    config_data = None
     if config:
-        click.echo(
-            "--config name:value will be deprecated in Datasette 1.0, use --setting name value instead",
-            err=True,
-        )
-        combined_settings.update(config)
-    combined_settings.update(settings)
+        config_data = parse_metadata(config.read())
+
+    config_data = config_data or {}
+
+    # Merge in settings from -s/--setting
+    if settings:
+        settings_updates = pairs_to_nested_config(settings)
+        config_data.update(settings_updates)

     kwargs = dict(
         immutables=immutable,
         cache_headers=not reload,
         cors=cors,
         inspect_data=inspect_data,
+        config=config_data,
         metadata=metadata_data,
         sqlite_extensions=sqlite_extensions,
         template_dir=template_dir,
         plugins_dir=plugins_dir,
         static_mounts=static,
-        settings=combined_settings,
+        settings=None,  # These are passed in config= now
         memory=memory,
         secret=secret,
         version_note=version_note,
         pdb=pdb,
         crossdb=crossdb,
         nolock=nolock,
+        internal=internal,
     )

     # if files is a single directory, use that as config_dir=
@@ -653,7 +620,10 @@ def serve(
         headers = {}
         if token:
             headers["Authorization"] = "Bearer {}".format(token)
-        response = client.get(get, headers=headers)
+        cookies = {}
+        if actor:
+            cookies["ds_actor"] = client.actor_cookie(json.loads(actor))
+        response = client.get(get, headers=headers, cookies=cookies)
         click.echo(response.text)
         exit_code = 0 if response.status == 200 else 1
         sys.exit(exit_code)

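The new --actor option signs a JSON actor dictionary into the ds_actor cookie before issuing the --get request. The equivalent from Python, using the same DatasetteClient helper the CLI calls:

```python
import asyncio
from datasette.app import Datasette


async def main():
    ds = Datasette(memory=True)
    # Mirror what `datasette --get /-/actor.json --actor '{"id": "root"}'` does
    cookies = {"ds_actor": ds.client.actor_cookie({"id": "root"})}
    response = await ds.client.get("/-/actor.json", cookies=cookies)
    print(response.json())


asyncio.run(main())
```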
@@ -14,6 +14,7 @@ from .utils import (
     detect_spatialite,
     get_all_foreign_keys,
     get_outbound_foreign_keys,
+    md5_not_usedforsecurity,
     sqlite_timelimit,
     sqlite3,
     table_columns,
@@ -28,7 +29,13 @@ AttachedDatabase = namedtuple("AttachedDatabase", ("seq", "name", "file"))

 class Database:
     def __init__(
-        self, ds, path=None, is_mutable=True, is_memory=False, memory_name=None
+        self,
+        ds,
+        path=None,
+        is_mutable=True,
+        is_memory=False,
+        memory_name=None,
+        mode=None,
     ):
         self.name = None
         self.route = None
@@ -49,6 +56,7 @@ class Database:
         self._write_connection = None
         # This is used to track all file connections so they can be closed
         self._all_file_connections = []
+        self.mode = mode

     @property
     def cached_table_counts(self):
@@ -62,6 +70,12 @@ class Database:
         }
         return self._cached_table_counts

+    @property
+    def color(self):
+        if self.hash:
+            return self.hash[:6]
+        return md5_not_usedforsecurity(self.name)[:6]
+
     def suggest_name(self):
         if self.path:
             return Path(self.path).stem
@@ -83,6 +97,7 @@ class Database:
             return conn
         if self.is_memory:
             return sqlite3.connect(":memory:", uri=True)
+
         # mode=ro or immutable=1?
         if self.is_mutable:
             qs = "?mode=ro"
@@ -93,6 +108,8 @@ class Database:
         assert not (write and not self.is_mutable)
         if write:
             qs = ""
+        if self.mode is not None:
+            qs = f"?mode={self.mode}"
         conn = sqlite3.connect(
             f"file:{self.path}{qs}", uri=True, check_same_thread=False
         )
@@ -106,8 +123,7 @@ class Database:

     async def execute_write(self, sql, params=None, block=True):
         def _inner(conn):
-            with conn:
-                return conn.execute(sql, params or [])
+            return conn.execute(sql, params or [])

         with trace("sql", database=self.name, sql=sql.strip(), params=params):
             results = await self.execute_write_fn(_inner, block=block)
@@ -115,8 +131,7 @@ class Database:

     async def execute_write_script(self, sql, block=True):
         def _inner(conn):
-            with conn:
-                return conn.executescript(sql)
+            return conn.executescript(sql)

         with trace("sql", database=self.name, sql=sql.strip(), executescript=True):
             results = await self.execute_write_fn(_inner, block=block)
@@ -132,8 +147,7 @@ class Database:
                     count += 1
                     yield param

-            with conn:
-                return conn.executemany(sql, count_params(params_seq)), count
+            return conn.executemany(sql, count_params(params_seq)), count

         with trace(
             "sql", database=self.name, sql=sql.strip(), executemany=True
@@ -142,25 +156,60 @@ class Database:
             kwargs["count"] = count
         return results

-    async def execute_write_fn(self, fn, block=True):
+    async def execute_isolated_fn(self, fn):
+        # Open a new connection just for the duration of this function
+        # blocking the write queue to avoid any writes occurring during it
+        if self.ds.executor is None:
+            # non-threaded mode
+            isolated_connection = self.connect(write=True)
+            try:
+                result = fn(isolated_connection)
+            finally:
+                isolated_connection.close()
+                try:
+                    self._all_file_connections.remove(isolated_connection)
+                except ValueError:
+                    # Was probably a memory connection
+                    pass
+            return result
+        else:
+            # Threaded mode - send to write thread
+            return await self._send_to_write_thread(fn, isolated_connection=True)
+
+    async def execute_write_fn(self, fn, block=True, transaction=True):
         if self.ds.executor is None:
             # non-threaded mode
             if self._write_connection is None:
                 self._write_connection = self.connect(write=True)
                 self.ds._prepare_connection(self._write_connection, self.name)
-            return fn(self._write_connection)
+            if transaction:
+                with self._write_connection:
+                    return fn(self._write_connection)
+            else:
+                return fn(self._write_connection)
+        else:
+            return await self._send_to_write_thread(
+                fn, block=block, transaction=transaction
+            )

-        # threaded mode
-        task_id = uuid.uuid5(uuid.NAMESPACE_DNS, "datasette.io")
+    async def _send_to_write_thread(
+        self, fn, block=True, isolated_connection=False, transaction=True
+    ):
         if self._write_queue is None:
             self._write_queue = queue.Queue()
         if self._write_thread is None:
             self._write_thread = threading.Thread(
                 target=self._execute_writes, daemon=True
             )
+            self._write_thread.name = "_execute_writes for database {}".format(
+                self.name
+            )
             self._write_thread.start()
+        task_id = uuid.uuid5(uuid.NAMESPACE_DNS, "datasette.io")
         reply_queue = janus.Queue()
-        self._write_queue.put(WriteTask(fn, task_id, reply_queue))
+        self._write_queue.put(
+            WriteTask(fn, task_id, reply_queue, isolated_connection, transaction)
+        )
         if block:
             result = await reply_queue.async_q.get()
             if isinstance(result, Exception):
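execute_write_fn() now wraps the supplied function in a transaction by default; transaction=False opts out, which matters for statements such as VACUUM that refuse to run inside one. A sketch:

```python
from datasette.database import Database


async def vacuum(db: Database):
    def _vacuum(conn):
        # VACUUM cannot run inside a transaction, so skip the default
        # transaction wrapper introduced by this change
        conn.execute("VACUUM")

    await db.execute_write_fn(_vacuum, transaction=False)
```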
@@ -185,12 +234,32 @@ class Database:
             if conn_exception is not None:
                 result = conn_exception
             else:
-                try:
-                    result = task.fn(conn)
-                except Exception as e:
-                    sys.stderr.write("{}\n".format(e))
-                    sys.stderr.flush()
-                    result = e
+                if task.isolated_connection:
+                    isolated_connection = self.connect(write=True)
+                    try:
+                        result = task.fn(isolated_connection)
+                    except Exception as e:
+                        sys.stderr.write("{}\n".format(e))
+                        sys.stderr.flush()
+                        result = e
+                    finally:
+                        isolated_connection.close()
+                        try:
+                            self._all_file_connections.remove(isolated_connection)
+                        except ValueError:
+                            # Was probably a memory connection
+                            pass
+                else:
+                    try:
+                        if task.transaction:
+                            with conn:
+                                result = task.fn(conn)
+                        else:
+                            result = task.fn(conn)
+                    except Exception as e:
+                        sys.stderr.write("{}\n".format(e))
+                        sys.stderr.flush()
+                        result = e
             task.reply_queue.sync_q.put(result)

     async def execute_fn(self, fn):
@@ -363,7 +432,7 @@ class Database:
         return await self.execute_fn(lambda conn: detect_fts(conn, table))

     async def label_column_for_table(self, table):
-        explicit_label_column = self.ds.table_metadata(self.name, table).get(
+        explicit_label_column = (await self.ds.table_config(self.name, table)).get(
             "label_column"
         )
         if explicit_label_column:
@@ -400,6 +469,7 @@ class Database:
                 and (
                     sql like '%VIRTUAL TABLE%USING FTS%'
                 ) or name in ('sqlite_stat1', 'sqlite_stat2', 'sqlite_stat3', 'sqlite_stat4')
+                or name like '\\_%' escape '\\'
             """
             )
         ).rows
@@ -432,13 +502,11 @@ class Database:
             )
         ).rows
         ]
-        # Add any from metadata.json
-        db_metadata = self.ds.metadata(database=self.name)
-        if "tables" in db_metadata:
+        # Add any tables marked as hidden in config
+        db_config = self.ds.config.get("databases", {}).get(self.name, {})
+        if "tables" in db_config:
             hidden_tables += [
-                t
-                for t in db_metadata["tables"]
-                if db_metadata["tables"][t].get("hidden")
+                t for t in db_config["tables"] if db_config["tables"][t].get("hidden")
             ]
         # Also mark as hidden any tables which start with the name of a hidden table
         # e.g. "searchable_fts" implies "searchable_fts_content" should be hidden
@@ -498,12 +566,14 @@ class Database:


 class WriteTask:
-    __slots__ = ("fn", "task_id", "reply_queue")
+    __slots__ = ("fn", "task_id", "reply_queue", "isolated_connection", "transaction")

-    def __init__(self, fn, task_id, reply_queue):
+    def __init__(self, fn, task_id, reply_queue, isolated_connection, transaction):
         self.fn = fn
         self.task_id = task_id
         self.reply_queue = reply_queue
+        self.isolated_connection = isolated_connection
+        self.transaction = transaction


 class QueryInterrupted(Exception):

@@ -2,39 +2,128 @@ from datasette import hookimpl, Permission
 from datasette.utils import actor_matches_allow
 import itsdangerous
 import time
+from typing import Union, Tuple


 @hookimpl
 def register_permissions():
     return (
-        # name, abbr, description, takes_database, takes_resource, default
-        Permission(
-            "view-instance", "vi", "View Datasette instance", False, False, True
-        ),
-        Permission("view-database", "vd", "View database", True, False, True),
-        Permission(
-            "view-database-download", "vdd", "Download database file", True, False, True
-        ),
-        Permission("view-table", "vt", "View table", True, True, True),
-        Permission("view-query", "vq", "View named query results", True, True, True),
-        Permission(
-            "execute-sql", "es", "Execute read-only SQL queries", True, False, True
-        ),
-        Permission(
-            "permissions-debug",
-            "pd",
-            "Access permission debug tool",
-            False,
-            False,
-            False,
-        ),
-        Permission("debug-menu", "dm", "View debug menu items", False, False, False),
-        # Write API permissions
-        Permission("insert-row", "ir", "Insert rows", True, True, False),
-        Permission("delete-row", "dr", "Delete rows", True, True, False),
-        Permission("update-row", "ur", "Update rows", True, True, False),
-        Permission("create-table", "ct", "Create tables", True, False, False),
-        Permission("drop-table", "dt", "Drop tables", True, True, False),
+        Permission(
+            name="view-instance",
+            abbr="vi",
+            description="View Datasette instance",
+            takes_database=False,
+            takes_resource=False,
+            default=True,
+        ),
+        Permission(
+            name="view-database",
+            abbr="vd",
+            description="View database",
+            takes_database=True,
+            takes_resource=False,
+            default=True,
+            implies_can_view=True,
+        ),
+        Permission(
+            name="view-database-download",
+            abbr="vdd",
+            description="Download database file",
+            takes_database=True,
+            takes_resource=False,
+            default=True,
+        ),
+        Permission(
+            name="view-table",
+            abbr="vt",
+            description="View table",
+            takes_database=True,
+            takes_resource=True,
+            default=True,
+            implies_can_view=True,
+        ),
+        Permission(
+            name="view-query",
+            abbr="vq",
+            description="View named query results",
+            takes_database=True,
+            takes_resource=True,
+            default=True,
+            implies_can_view=True,
+        ),
+        Permission(
+            name="execute-sql",
+            abbr="es",
+            description="Execute read-only SQL queries",
+            takes_database=True,
+            takes_resource=False,
+            default=True,
+            implies_can_view=True,
+        ),
+        Permission(
+            name="permissions-debug",
+            abbr="pd",
+            description="Access permission debug tool",
+            takes_database=False,
+            takes_resource=False,
+            default=False,
+        ),
+        Permission(
+            name="debug-menu",
+            abbr="dm",
+            description="View debug menu items",
+            takes_database=False,
+            takes_resource=False,
+            default=False,
+        ),
+        Permission(
+            name="insert-row",
+            abbr="ir",
+            description="Insert rows",
+            takes_database=True,
+            takes_resource=True,
+            default=False,
+        ),
+        Permission(
+            name="delete-row",
+            abbr="dr",
+            description="Delete rows",
+            takes_database=True,
+            takes_resource=True,
+            default=False,
+        ),
+        Permission(
+            name="update-row",
+            abbr="ur",
+            description="Update rows",
+            takes_database=True,
+            takes_resource=True,
+            default=False,
+        ),
+        Permission(
+            name="create-table",
+            abbr="ct",
+            description="Create tables",
+            takes_database=True,
+            takes_resource=False,
+            default=False,
+        ),
+        Permission(
+            name="alter-table",
+            abbr="at",
+            description="Alter tables",
+            takes_database=True,
+            takes_resource=True,
+            default=False,
+        ),
+        Permission(
+            name="drop-table",
+            abbr="dt",
+            description="Drop tables",
+            takes_database=True,
+            takes_resource=True,
+            default=False,
+        ),
     )
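[Editor's example - illustrative, not part of the diff] With the
keyword-argument form shown above, a plugin can register its own permission
the same way. The "export-table" permission name here is hypothetical:

    from datasette import hookimpl, Permission


    @hookimpl
    def register_permissions(datasette):
        return [
            Permission(
                name="export-table",  # hypothetical permission name
                abbr=None,
                description="Export table data",
                takes_database=True,
                takes_resource=True,
                default=False,
            )
        ]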
@@ -47,6 +136,7 @@ def permission_allowed_default(datasette, actor, action, resource):
             "debug-menu",
             "insert-row",
             "create-table",
+            "alter-table",
             "drop-table",
             "delete-row",
             "update-row",

@@ -54,7 +144,7 @@ def permission_allowed_default(datasette, actor, action, resource):
         if actor and actor.get("id") == "root":
             return True

-        # Resolve metadata view permissions
+        # Resolve view permissions in allow blocks in configuration
         if action in (
             "view-instance",
             "view-database",

@@ -62,14 +152,14 @@ def permission_allowed_default(datasette, actor, action, resource):
             "view-query",
             "execute-sql",
         ):
-            result = await _resolve_metadata_view_permissions(
+            result = await _resolve_config_view_permissions(
                 datasette, actor, action, resource
             )
             if result is not None:
                 return result

-        # Check custom permissions: blocks
-        result = await _resolve_metadata_permissions_blocks(
+        # Resolve custom permissions: blocks in configuration
+        result = await _resolve_config_permissions_blocks(
            datasette, actor, action, resource
        )
        if result is not None:

@@ -82,10 +172,10 @@ def permission_allowed_default(datasette, actor, action, resource):
     return inner


-async def _resolve_metadata_permissions_blocks(datasette, actor, action, resource):
+async def _resolve_config_permissions_blocks(datasette, actor, action, resource):
     # Check custom permissions: blocks
-    metadata = datasette.metadata()
-    root_block = (metadata.get("permissions", None) or {}).get(action)
+    config = datasette.config or {}
+    root_block = (config.get("permissions", None) or {}).get(action)
     if root_block:
         root_result = actor_matches_allow(actor, root_block)
         if root_result is not None:

@@ -98,7 +188,7 @@ async def _resolve_metadata_permissions_blocks(datasette, actor, action, resourc
     else:
         database = resource[0]
     database_block = (
-        (metadata.get("databases", {}).get(database, {}).get("permissions", None)) or {}
+        (config.get("databases", {}).get(database, {}).get("permissions", None)) or {}
     ).get(action)
     if database_block:
         database_result = actor_matches_allow(actor, database_block)

@@ -110,7 +200,7 @@ async def _resolve_metadata_permissions_blocks(datasette, actor, action, resourc
     database, table_or_query = resource
     table_block = (
         (
-            metadata.get("databases", {})
+            config.get("databases", {})
             .get(database, {})
             .get("tables", {})
             .get(table_or_query, {})

@@ -125,7 +215,7 @@ async def _resolve_metadata_permissions_blocks(datasette, actor, action, resourc
     # Finally the canned queries
     query_block = (
         (
-            metadata.get("databases", {})
+            config.get("databases", {})
             .get(database, {})
             .get("queries", {})
             .get(table_or_query, {})

@@ -140,27 +230,30 @@ async def _resolve_metadata_permissions_blocks(datasette, actor, action, resourc
     return None


-async def _resolve_metadata_view_permissions(datasette, actor, action, resource):
+async def _resolve_config_view_permissions(datasette, actor, action, resource):
+    config = datasette.config or {}
     if action == "view-instance":
-        allow = datasette.metadata("allow")
+        allow = config.get("allow")
         if allow is not None:
             return actor_matches_allow(actor, allow)
     elif action == "view-database":
         if resource == "_internal" and (actor is None or actor.get("id") != "root"):
             return False
-        database_allow = datasette.metadata("allow", database=resource)
+        database_allow = ((config.get("databases") or {}).get(resource) or {}).get(
+            "allow"
+        )
         if database_allow is None:
             return None
         return actor_matches_allow(actor, database_allow)
     elif action == "view-table":
         database, table = resource
-        tables = datasette.metadata("tables", database=database) or {}
+        tables = ((config.get("databases") or {}).get(database) or {}).get(
+            "tables"
+        ) or {}
         table_allow = (tables.get(table) or {}).get("allow")
         if table_allow is None:
             return None
         return actor_matches_allow(actor, table_allow)
     elif action == "view-query":
-        # Check if this query has a "allow" block in metadata
+        # Check if this query has a "allow" block in config
         database, query_name = resource
         query = await datasette.get_canned_query(database, query_name, actor)
         assert query is not None

@@ -170,14 +263,90 @@ async def _resolve_metadata_view_permissions(datasette, actor, action, resource)
         return actor_matches_allow(actor, allow)
     elif action == "execute-sql":
         # Use allow_sql block from database block, or from top-level
-        database_allow_sql = datasette.metadata("allow_sql", database=resource)
+        database_allow_sql = ((config.get("databases") or {}).get(resource) or {}).get(
+            "allow_sql"
+        )
         if database_allow_sql is None:
-            database_allow_sql = datasette.metadata("allow_sql")
+            database_allow_sql = config.get("allow_sql")
         if database_allow_sql is None:
             return None
         return actor_matches_allow(actor, database_allow_sql)


+def restrictions_allow_action(
+    datasette: "Datasette",
+    restrictions: dict,
+    action: str,
+    resource: Union[str, Tuple[str, str]],
+):
+    "Do these restrictions allow the requested action against the requested resource?"
+    if action == "view-instance":
+        # Special case for view-instance: it's allowed if the restrictions include any
+        # permissions that have the implies_can_view=True flag set
+        all_rules = restrictions.get("a") or []
+        for database_rules in (restrictions.get("d") or {}).values():
+            all_rules += database_rules
+        for database_resource_rules in (restrictions.get("r") or {}).values():
+            for resource_rules in database_resource_rules.values():
+                all_rules += resource_rules
+        permissions = [datasette.get_permission(action) for action in all_rules]
+        if any(p for p in permissions if p.implies_can_view):
+            return True
+
+    if action == "view-database":
+        # Special case for view-database: it's allowed if the restrictions include any
+        # permissions that have the implies_can_view=True flag set AND takes_database
+        all_rules = restrictions.get("a") or []
+        database_rules = list((restrictions.get("d") or {}).get(resource) or [])
+        all_rules += database_rules
+        resource_rules = ((restrictions.get("r") or {}).get(resource) or {}).values()
+        for resource_rules in (restrictions.get("r") or {}).values():
+            for table_rules in resource_rules.values():
+                all_rules += table_rules
+        permissions = [datasette.get_permission(action) for action in all_rules]
+        if any(p for p in permissions if p.implies_can_view and p.takes_database):
+            return True
+
+    # Does this action have an abbreviation?
+    to_check = {action}
+    permission = datasette.permissions.get(action)
+    if permission and permission.abbr:
+        to_check.add(permission.abbr)
+
+    # If restrictions is defined then we use those to further restrict the actor
+    # Crucially, we only use this to say NO (return False) - we never
+    # use it to return YES (True) because that might over-ride other
+    # restrictions placed on this actor
+    all_allowed = restrictions.get("a")
+    if all_allowed is not None:
+        assert isinstance(all_allowed, list)
+        if to_check.intersection(all_allowed):
+            return True
+    # How about for the current database?
+    if resource:
+        if isinstance(resource, str):
+            database_name = resource
+        else:
+            database_name = resource[0]
+        database_allowed = restrictions.get("d", {}).get(database_name)
+        if database_allowed is not None:
+            assert isinstance(database_allowed, list)
+            if to_check.intersection(database_allowed):
+                return True
+    # Or the current table? That's any time the resource is (database, table)
+    if resource is not None and not isinstance(resource, str) and len(resource) == 2:
+        database, table = resource
+        table_allowed = restrictions.get("r", {}).get(database, {}).get(table)
+        # TODO: What should this do for canned queries?
+        if table_allowed is not None:
+            assert isinstance(table_allowed, list)
+            if to_check.intersection(table_allowed):
+                return True
+
+    # This action is not specifically allowed, so reject it
+    return False
+
+
 @hookimpl(specname="permission_allowed")
 def permission_allowed_actor_restrictions(datasette, actor, action, resource):
     if actor is None:

@@ -186,40 +355,12 @@ def permission_allowed_actor_restrictions(datasette, actor, action, resource):
         # No restrictions, so we have no opinion
         return None
     _r = actor.get("_r")

-    # Does this action have an abbreviation?
-    to_check = {action}
-    permission = datasette.permissions.get(action)
-    if permission and permission.abbr:
-        to_check.add(permission.abbr)
-
-    # If _r is defined then we use those to further restrict the actor
-    # Crucially, we only use this to say NO (return False) - we never
-    # use it to return YES (True) because that might over-ride other
-    # restrictions placed on this actor
-    all_allowed = _r.get("a")
-    if all_allowed is not None:
-        assert isinstance(all_allowed, list)
-        if to_check.intersection(all_allowed):
-            return None
-    # How about for the current database?
-    if isinstance(resource, str):
-        database_allowed = _r.get("d", {}).get(resource)
-        if database_allowed is not None:
-            assert isinstance(database_allowed, list)
-            if to_check.intersection(database_allowed):
-                return None
-    # Or the current table? That's any time the resource is (database, table)
-    if resource is not None and not isinstance(resource, str) and len(resource) == 2:
-        database, table = resource
-        table_allowed = _r.get("r", {}).get(database, {}).get(table)
-        # TODO: What should this do for canned queries?
-        if table_allowed is not None:
-            assert isinstance(table_allowed, list)
-            if to_check.intersection(table_allowed):
-                return None
-    # This action is not specifically allowed, so reject it
-    return False
+    if restrictions_allow_action(datasette, _r, action, resource):
+        # Return None because we do not have an opinion here
+        return None
+    else:
+        # Block this permission check
+        return False


 @hookimpl
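[Editor's example - illustrative, not part of the diff] The "_r" restrictions
block consumed by restrictions_allow_action() uses "a" for instance-wide
rules, "d" for per-database rules and "r" for per-table rules, each holding
action names or their abbreviations:

    actor = {
        "id": "developer",
        "_r": {
            "a": ["vi"],                          # view-instance
            "d": {"fixtures": ["es"]},            # execute-sql on fixtures
            "r": {"content": {"repos": ["ir"]}},  # insert-row on content/repos
        },
    }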
@@ -0,0 +1,236 @@
from abc import ABC, abstractproperty
from dataclasses import asdict, dataclass, field
from datasette.hookspecs import hookimpl
from datetime import datetime, timezone
from typing import Optional


@dataclass
class Event(ABC):
    @abstractproperty
    def name(self):
        pass

    created: datetime = field(
        init=False, default_factory=lambda: datetime.now(timezone.utc)
    )
    actor: Optional[dict]

    def properties(self):
        properties = asdict(self)
        properties.pop("actor", None)
        properties.pop("created", None)
        return properties


@dataclass
class LoginEvent(Event):
    """
    Event name: ``login``

    A user (represented by ``event.actor``) has logged in.
    """

    name = "login"


@dataclass
class LogoutEvent(Event):
    """
    Event name: ``logout``

    A user (represented by ``event.actor``) has logged out.
    """

    name = "logout"


@dataclass
class CreateTokenEvent(Event):
    """
    Event name: ``create-token``

    A user created an API token.

    :ivar expires_after: Number of seconds after which this token will expire.
    :type expires_after: int or None
    :ivar restrict_all: Restricted permissions for this token.
    :type restrict_all: list
    :ivar restrict_database: Restricted database permissions for this token.
    :type restrict_database: dict
    :ivar restrict_resource: Restricted resource permissions for this token.
    :type restrict_resource: dict
    """

    name = "create-token"
    expires_after: Optional[int]
    restrict_all: list
    restrict_database: dict
    restrict_resource: dict


@dataclass
class CreateTableEvent(Event):
    """
    Event name: ``create-table``

    A new table has been created in the database.

    :ivar database: The name of the database where the table was created.
    :type database: str
    :ivar table: The name of the table that was created
    :type table: str
    :ivar schema: The SQL schema definition for the new table.
    :type schema: str
    """

    name = "create-table"
    database: str
    table: str
    schema: str


@dataclass
class DropTableEvent(Event):
    """
    Event name: ``drop-table``

    A table has been dropped from the database.

    :ivar database: The name of the database where the table was dropped.
    :type database: str
    :ivar table: The name of the table that was dropped
    :type table: str
    """

    name = "drop-table"
    database: str
    table: str


@dataclass
class AlterTableEvent(Event):
    """
    Event name: ``alter-table``

    A table has been altered.

    :ivar database: The name of the database where the table was altered
    :type database: str
    :ivar table: The name of the table that was altered
    :type table: str
    :ivar before_schema: The table's SQL schema before the alteration
    :type before_schema: str
    :ivar after_schema: The table's SQL schema after the alteration
    :type after_schema: str
    """

    name = "alter-table"
    database: str
    table: str
    before_schema: str
    after_schema: str


@dataclass
class InsertRowsEvent(Event):
    """
    Event name: ``insert-rows``

    Rows were inserted into a table.

    :ivar database: The name of the database where the rows were inserted.
    :type database: str
    :ivar table: The name of the table where the rows were inserted.
    :type table: str
    :ivar num_rows: The number of rows that were requested to be inserted.
    :type num_rows: int
    :ivar ignore: Was ignore set?
    :type ignore: bool
    :ivar replace: Was replace set?
    :type replace: bool
    """

    name = "insert-rows"
    database: str
    table: str
    num_rows: int
    ignore: bool
    replace: bool


@dataclass
class UpsertRowsEvent(Event):
    """
    Event name: ``upsert-rows``

    Rows were upserted into a table.

    :ivar database: The name of the database where the rows were inserted.
    :type database: str
    :ivar table: The name of the table where the rows were inserted.
    :type table: str
    :ivar num_rows: The number of rows that were requested to be inserted.
    :type num_rows: int
    """

    name = "upsert-rows"
    database: str
    table: str
    num_rows: int


@dataclass
class UpdateRowEvent(Event):
    """
    Event name: ``update-row``

    A row was updated in a table.

    :ivar database: The name of the database where the row was updated.
    :type database: str
    :ivar table: The name of the table where the row was updated.
    :type table: str
    :ivar pks: The primary key values of the updated row.
    """

    name = "update-row"
    database: str
    table: str
    pks: list


@dataclass
class DeleteRowEvent(Event):
    """
    Event name: ``delete-row``

    A row was deleted from a table.

    :ivar database: The name of the database where the row was deleted.
    :type database: str
    :ivar table: The name of the table where the row was deleted.
    :type table: str
    :ivar pks: The primary key values of the deleted row.
    """

    name = "delete-row"
    database: str
    table: str
    pks: list


@hookimpl
def register_events():
    return [
        LoginEvent,
        LogoutEvent,
        CreateTableEvent,
        CreateTokenEvent,
        AlterTableEvent,
        DropTableEvent,
        InsertRowsEvent,
        UpsertRowsEvent,
        UpdateRowEvent,
        DeleteRowEvent,
    ]
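[Editor's example - illustrative, not part of the diff] A caller records one
of these events by instantiating it and handing it to the event-tracking
machinery; this sketch assumes an awaitable datasette.track_event() that
dispatches to track_event hook implementations:

    from datasette.events import LoginEvent

    async def after_login(datasette, actor):
        await datasette.track_event(LoginEvent(actor=actor))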
@@ -11,8 +11,8 @@ from datasette.utils import (
 )


-def load_facet_configs(request, table_metadata):
-    # Given a request and the metadata configuration for a table, return
+def load_facet_configs(request, table_config):
+    # Given a request and the configuration for a table, return
     # a dictionary of selected facets, their lists of configs and for each
     # config whether it came from the request or the metadata.
     #

@@ -20,21 +20,21 @@ def load_facet_configs(request, table_metadata):
     #   {"source": "metadata", "config": config1},
     #   {"source": "request", "config": config2}]}
     facet_configs = {}
-    table_metadata = table_metadata or {}
-    metadata_facets = table_metadata.get("facets", [])
-    for metadata_config in metadata_facets:
-        if isinstance(metadata_config, str):
+    table_config = table_config or {}
+    table_facet_configs = table_config.get("facets", [])
+    for facet_config in table_facet_configs:
+        if isinstance(facet_config, str):
             type = "column"
-            metadata_config = {"simple": metadata_config}
+            facet_config = {"simple": facet_config}
         else:
             assert (
-                len(metadata_config.values()) == 1
+                len(facet_config.values()) == 1
             ), "Metadata config dicts should be {type: config}"
-            type, metadata_config = list(metadata_config.items())[0]
-            if isinstance(metadata_config, str):
-                metadata_config = {"simple": metadata_config}
+            type, facet_config = list(facet_config.items())[0]
+            if isinstance(facet_config, str):
+                facet_config = {"simple": facet_config}
         facet_configs.setdefault(type, []).append(
-            {"source": "metadata", "config": metadata_config}
+            {"source": "metadata", "config": facet_config}
         )
     qs_pairs = urllib.parse.parse_qs(request.query_string, keep_blank_values=True)
     for key, values in qs_pairs.items():

@@ -45,13 +45,12 @@ def load_facet_configs(request, table_metadata):
         elif key.startswith("_facet_"):
             type = key[len("_facet_") :]
             for value in values:
-                # The value is the config - either JSON or not
-                if value.startswith("{"):
-                    config = json.loads(value)
-                else:
-                    config = {"simple": value}
+                # The value is the facet_config - either JSON or not
+                facet_config = (
+                    json.loads(value) if value.startswith("{") else {"simple": value}
+                )
                 facet_configs.setdefault(type, []).append(
-                    {"source": "request", "config": config}
+                    {"source": "request", "config": facet_config}
                 )
     return facet_configs

@@ -75,7 +74,7 @@ class Facet:
         sql=None,
         table=None,
         params=None,
-        metadata=None,
+        table_config=None,
         row_count=None,
     ):
         assert table or sql, "Must provide either table= or sql="

@@ -86,12 +85,12 @@ class Facet:
         self.table = table
         self.sql = sql or f"select * from [{table}]"
         self.params = params or []
-        self.metadata = metadata
+        self.table_config = table_config
         # row_count can be None, in which case we calculate it ourselves:
         self.row_count = row_count

     def get_configs(self):
-        configs = load_facet_configs(self.request, self.metadata)
+        configs = load_facet_configs(self.request, self.table_config)
         return configs.get(self.type) or []

     def get_querystring_pairs(self):

@@ -253,7 +252,7 @@ class ColumnFacet(Facet):
                 # Attempt to expand foreign keys into labels
                 values = [row["value"] for row in facet_rows]
                 expanded = await self.ds.expand_foreign_keys(
-                    self.database, self.table, column, values
+                    self.request.actor, self.database, self.table, column, values
                 )
             else:
                 expanded = {}
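[Editor's example - illustrative, not part of the diff] The shapes that
load_facet_configs() accepts, from table configuration and from the query
string (the column and table names are made up):

    table_config = {
        "facets": [
            "state",            # plain column facet
            {"array": "tags"},  # typed facet config
        ]
    }
    # Request-driven equivalent: ?_facet=state&_facet_array=tags
    configs = load_facet_configs(request, table_config)
    # {"column": [{"source": "metadata", "config": {"simple": "state"}}, ...],
    #  "array": [{"source": "metadata", "config": {"simple": "tags"}}, ...]}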
@@ -50,7 +50,7 @@ def search_filters(request, database, table, datasette):
     extra_context = {}

     # Figure out which fts_table to use
-    table_metadata = datasette.table_metadata(database, table)
+    table_metadata = await datasette.table_config(database, table)
     db = datasette.get_database(database)
     fts_table = request.args.get("_fts_table")
     fts_table = fts_table or table_metadata.get("fts_table")

@@ -80,9 +80,9 @@ def search_filters(request, database, table, datasette):
                 "{fts_pk} in (select rowid from {fts_table} where {fts_table} match {match_clause})".format(
                     fts_table=escape_sqlite(fts_table),
                     fts_pk=escape_sqlite(fts_pk),
-                    match_clause=":search"
-                    if search_mode_raw
-                    else "escape_fts(:search)",
+                    match_clause=(
+                        ":search" if search_mode_raw else "escape_fts(:search)"
+                    ),
                 )
             )
             human_descriptions.append(f'search matches "{search}"')

@@ -99,9 +99,11 @@ def search_filters(request, database, table, datasette):
                     "rowid in (select rowid from {fts_table} where {search_col} match {match_clause})".format(
                         fts_table=escape_sqlite(fts_table),
                         search_col=escape_sqlite(search_col),
-                        match_clause=":search_{}".format(i)
-                        if search_mode_raw
-                        else "escape_fts(:search_{})".format(i),
+                        match_clause=(
+                            ":search_{}".format(i)
+                            if search_mode_raw
+                            else "escape_fts(:search_{})".format(i)
+                        ),
                     )
                 )
                 human_descriptions.append(

@@ -279,6 +281,13 @@ class Filters:
             '{c} contains "{v}"',
             format="%{}%",
         ),
+        TemplatedFilter(
+            "notcontains",
+            "does not contain",
+            '"{c}" not like :{p}',
+            '{c} does not contain "{v}"',
+            format="%{}%",
+        ),
         TemplatedFilter(
             "endswith",
             "ends with",
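[Editor's example - illustrative, not part of the diff] The new notcontains
filter is driven from the query string exactly like the existing contains
filter, producing a NOT LIKE clause:

    # /db/table.json?name__notcontains=apple
    # SQL fragment generated by the TemplatedFilter above:
    #     "name" not like :p0    (with :p0 bound to '%apple%')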
@@ -1,4 +1,3 @@
-from os import stat
 from datasette import hookimpl, Response

@@ -1,14 +1,12 @@
 from datasette import hookimpl, Response
-from .utils import await_me_maybe, add_cors_headers
+from .utils import add_cors_headers
 from .utils.asgi import (
     Base400,
     Forbidden,
 )
 from .views.base import DatasetteError
 from markupsafe import Markup
 import pdb
 import traceback
 from .plugins import pm

 try:
     import rich

@@ -57,7 +55,8 @@ def handle_exception(datasette, request, exception):
     if request.path.split("?")[0].endswith(".json"):
         return Response.json(info, status=status, headers=headers)
     else:
-        template = datasette.jinja_env.select_template(templates)
+        environment = datasette.get_jinja_environment(request)
+        template = environment.select_template(templates)
         return Response.html(
             await template.render_async(
                 dict(
@@ -94,6 +94,16 @@ def actor_from_request(datasette, request):
     """Return an actor dictionary based on the incoming request"""


+@hookspec(firstresult=True)
+def actors_from_ids(datasette, actor_ids):
+    """Returns a dictionary mapping those IDs to actor dictionaries"""
+
+
+@hookspec
+def jinja2_environment_from_request(datasette, request, env):
+    """Return a Jinja2 environment based on the incoming request"""
+
+
 @hookspec
 def filters_from_request(request, database, table, datasette):
     """

@@ -130,16 +140,36 @@ def menu_links(datasette, actor, request):
     """Links for the navigation menu"""


+@hookspec
+def row_actions(datasette, actor, request, database, table, row):
+    """Links for the row actions menu"""
+
+
 @hookspec
 def table_actions(datasette, actor, database, table, request):
     """Links for the table actions menu"""


+@hookspec
+def view_actions(datasette, actor, database, view, request):
+    """Links for the view actions menu"""
+
+
+@hookspec
+def query_actions(datasette, actor, database, query_name, request, sql, params):
+    """Links for the query and canned query actions menu"""
+
+
 @hookspec
 def database_actions(datasette, actor, database, request):
     """Links for the database actions menu"""


+@hookspec
+def homepage_actions(datasette, actor, request):
+    """Links for the homepage actions menu"""
+
+
 @hookspec
 def skip_csrf(datasette, scope):
     """Mechanism for skipping CSRF checks for certain requests"""

@@ -148,3 +178,43 @@ def skip_csrf(datasette, scope):
 @hookspec
 def handle_exception(datasette, request, exception):
     """Handle an uncaught exception. Can return a Response or None."""
+
+
+@hookspec
+def track_event(datasette, event):
+    """Respond to an event tracked by Datasette"""
+
+
+@hookspec
+def register_events(datasette):
+    """Return a list of Event subclasses to use with track_event()"""
+
+
+@hookspec
+def top_homepage(datasette, request):
+    """HTML to include at the top of the homepage"""
+
+
+@hookspec
+def top_database(datasette, request, database):
+    """HTML to include at the top of the database page"""
+
+
+@hookspec
+def top_table(datasette, request, database, table):
+    """HTML to include at the top of the table page"""
+
+
+@hookspec
+def top_row(datasette, request, database, table, row):
+    """HTML to include at the top of the row page"""
+
+
+@hookspec
+def top_query(datasette, request, database, sql):
+    """HTML to include at the top of the query results page"""
+
+
+@hookspec
+def top_canned_query(datasette, request, database, query_name):
+    """HTML to include at the top of the canned query page"""
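[Editor's example - illustrative, not part of the diff] A plugin implementing
one of the new action-menu hooks returns a list of link dictionaries; href,
label and the optional description match what the action menu template
renders. The URL path here is hypothetical:

    from datasette import hookimpl


    @hookimpl
    def row_actions(datasette, actor, request, database, table, row):
        if actor is None:
            return []
        return [
            {
                "href": datasette.urls.path("/-/edit-row"),  # hypothetical path
                "label": "Edit this row",
                "description": "Open this row in an editing interface",
            }
        ]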
@@ -1,6 +1,16 @@
-import collections
+from dataclasses import dataclass
+from typing import Optional

-Permission = collections.namedtuple(
-    "Permission",
-    ("name", "abbr", "description", "takes_database", "takes_resource", "default"),
-)
+
+@dataclass
+class Permission:
+    name: str
+    abbr: Optional[str]
+    description: Optional[str]
+    takes_database: bool
+    takes_resource: bool
+    default: bool
+    # This is deliberately undocumented: it's considered an internal
+    # implementation detail for view-table/view-database and should
+    # not be used by plugins as it may change in the future.
+    implies_can_view: bool = False
@@ -1,9 +1,20 @@
 import importlib
+import os
 import pluggy
-import pkg_resources
+from pprint import pprint
 import sys
 from . import hookspecs

+if sys.version_info >= (3, 9):
+    import importlib.resources as importlib_resources
+else:
+    import importlib_resources
+if sys.version_info >= (3, 10):
+    import importlib.metadata as importlib_metadata
+else:
+    import importlib_metadata
+
+
 DEFAULT_PLUGINS = (
     "datasette.publish.heroku",
     "datasette.publish.cloudrun",

@@ -17,15 +28,59 @@ DEFAULT_PLUGINS = (
     "datasette.default_menu_links",
     "datasette.handle_exception",
     "datasette.forbidden",
+    "datasette.events",
 )

 pm = pluggy.PluginManager("datasette")
 pm.add_hookspecs(hookspecs)

-if not hasattr(sys, "_called_from_test"):
+DATASETTE_TRACE_PLUGINS = os.environ.get("DATASETTE_TRACE_PLUGINS", None)
+
+
+def before(hook_name, hook_impls, kwargs):
+    print(file=sys.stderr)
+    print(f"{hook_name}:", file=sys.stderr)
+    pprint(kwargs, width=40, indent=4, stream=sys.stderr)
+    print("Hook implementations:", file=sys.stderr)
+    pprint(hook_impls, width=40, indent=4, stream=sys.stderr)
+
+
+def after(outcome, hook_name, hook_impls, kwargs):
+    results = outcome.get_result()
+    if not isinstance(results, list):
+        results = [results]
+    print(f"Results:", file=sys.stderr)
+    pprint(results, width=40, indent=4, stream=sys.stderr)
+
+
+if DATASETTE_TRACE_PLUGINS:
+    pm.add_hookcall_monitoring(before, after)
+
+
+DATASETTE_LOAD_PLUGINS = os.environ.get("DATASETTE_LOAD_PLUGINS", None)
+
+if not hasattr(sys, "_called_from_test") and DATASETTE_LOAD_PLUGINS is None:
     # Only load plugins if not running tests
     pm.load_setuptools_entrypoints("datasette")

+# Load any plugins specified in DATASETTE_LOAD_PLUGINS")
+if DATASETTE_LOAD_PLUGINS is not None:
+    for package_name in [
+        name for name in DATASETTE_LOAD_PLUGINS.split(",") if name.strip()
+    ]:
+        try:
+            distribution = importlib_metadata.distribution(package_name)
+            entry_points = distribution.entry_points
+            for entry_point in entry_points:
+                if entry_point.group == "datasette":
+                    mod = entry_point.load()
+                    pm.register(mod, name=entry_point.name)
+                    # Ensure name can be found in plugin_to_distinfo later:
+                    pm._plugin_distinfo.append((mod, distribution))
+        except importlib_metadata.PackageNotFoundError:
+            sys.stderr.write("Plugin {} could not be found\n".format(package_name))
+
+
 # Load default plugins
 for plugin in DEFAULT_PLUGINS:
     mod = importlib.import_module(plugin)

@@ -40,16 +95,16 @@ def get_plugins():
        templates_path = None
        if plugin.__name__ not in DEFAULT_PLUGINS:
            try:
-               if pkg_resources.resource_isdir(plugin.__name__, "static"):
-                   static_path = pkg_resources.resource_filename(
-                       plugin.__name__, "static"
+               if (importlib_resources.files(plugin.__name__) / "static").is_dir():
+                   static_path = str(
+                       importlib_resources.files(plugin.__name__) / "static"
                    )
-               if pkg_resources.resource_isdir(plugin.__name__, "templates"):
-                   templates_path = pkg_resources.resource_filename(
-                       plugin.__name__, "templates"
+               if (importlib_resources.files(plugin.__name__) / "templates").is_dir():
+                   templates_path = str(
+                       importlib_resources.files(plugin.__name__) / "templates"
                    )
-           except (KeyError, ImportError):
-               # Caused by --plugins_dir= plugins - KeyError/ImportError thrown in Py3.5
+           except (TypeError, ModuleNotFoundError):
+               # Caused by --plugins_dir= plugins
                pass
        plugin_info = {
            "name": plugin.__name__,

@@ -60,6 +115,6 @@ def get_plugins():
        distinfo = plugin_to_distinfo.get(plugin)
        if distinfo:
            plugin_info["version"] = distinfo.version
-           plugin_info["name"] = distinfo.project_name
+           plugin_info["name"] = distinfo.name or distinfo.project_name
        plugins.append(plugin_info)
    return plugins
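[Editor's example - illustrative, not part of the diff] The
DATASETTE_LOAD_PLUGINS gate above means: unset loads every installed plugin,
an empty string loads none, and a comma-separated list loads exactly those. A
sketch of driving it from Python; the plugin name is just an example:

    import os
    import subprocess

    env = dict(os.environ, DATASETTE_LOAD_PLUGINS="datasette-cluster-map")
    subprocess.run(["datasette", "serve", "data.db"], env=env)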
@@ -68,7 +68,7 @@ def json_renderer(request, args, data, error, truncated=None):
     elif shape in ("objects", "object", "array"):
         columns = data.get("columns")
         rows = data.get("rows")
-        if rows and columns:
+        if rows and columns and not isinstance(rows[0], dict):
             data["rows"] = [dict(zip(columns, row)) for row in rows]
         if shape == "object":
             shape_error = None
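[Editor's example - illustrative, not part of the diff] The extra isinstance()
guard stops rows that already arrive as dictionaries from being zipped a
second time:

    columns = ["id", "name"]
    rows = [(1, "apple")]
    if rows and columns and not isinstance(rows[0], dict):
        rows = [dict(zip(columns, row)) for row in rows]
    # -> [{"id": 1, "name": "apple"}]; dict rows would pass through unchanged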
@@ -163,28 +163,22 @@ h6,
 }

-.page-header {
-    display: flex;
-    align-items: center;
-    padding-left: 10px;
-    border-left: 10px solid #666;
-    margin-bottom: 0.75rem;
-    margin-top: 1rem;
-}
-.page-header h1 {
-    display: inline;
-    margin: 0;
-    font-size: 2rem;
-    padding-right: 0.2em;
-}
-.page-header details {
-    display: inline-flex;
-}
-.page-header details > summary {
+.page-action-menu details > summary {
     list-style: none;
     display: inline-flex;
     cursor: pointer;
 }
-.page-header details > summary::-webkit-details-marker {
+.page-action-menu details > summary::-webkit-details-marker {
     display: none;
 }

@@ -275,6 +269,7 @@ header,
 footer {
     padding: 0.6rem 1rem 0.5rem 1rem;
     background-color: #276890;
+    background: linear-gradient(180deg, rgba(96,144,173,1) 0%, rgba(39,104,144,1) 50%);
     color: rgba(255,255,244,0.9);
     overflow: hidden;
     box-sizing: border-box;

@@ -352,25 +347,58 @@ details.nav-menu > summary::-webkit-details-marker {
 }
 details .nav-menu-inner {
     position: absolute;
-    top: 2rem;
+    top: 2.6rem;
     right: 10px;
     width: 180px;
     background-color: #276890;
-    padding: 1rem;
     z-index: 1000;
+    padding: 0;
 }
+.nav-menu-inner li,
+form.nav-menu-logout {
+    padding: 0.3rem 0.5rem;
+    border-top: 1px solid #ffffff69;
+}
 .nav-menu-inner a {
     display: block;
 }

 /* Table/database actions menu */
-.page-header {
+.page-action-menu {
     position: relative;
+    margin-bottom: 0.5em;
 }
 .actions-menu-links {
     display: inline;
 }
 .actions-menu-links .dropdown-menu {
     position: absolute;
     top: calc(100% + 10px);
-    left: -10px;
+    left: 0;
 }
+.page-action-menu .icon-text {
+    display: inline-flex;
+    align-items: center;
+    border-radius: .25rem;
+    padding: 5px 12px 3px 7px;
+    color: #fff;
+    font-weight: 400;
+    font-size: 0.8em;
+    background: linear-gradient(180deg, #007bff 0%, #4E79C7 100%);
+    border-color: #007bff;
+}
+.page-action-menu .icon-text span {
+    /* Nudge text up a bit */
+    position: relative;
+    top: -2px;
+}
+.page-action-menu .icon-text:hover {
+    cursor: pointer;
+}
+.page-action-menu .icon {
+    width: 18px;
+    height: 18px;
+    margin-right: 4px;
+}

 /* Components ============================================================== */

@@ -482,20 +510,18 @@ form.sql textarea {
     font-family: monospace;
     font-size: 1.3em;
 }
-form.sql label {
-    width: 15%;
-}
 form label {
     font-weight: bold;
     display: inline-block;
+    width: 15%;
 }
 .advanced-export form label {
     width: auto;
 }
 .advanced-export input[type=submit] {
     font-size: 0.6em;
     margin-left: 1em;
 }
 label.sort_by_desc {
     width: auto;
     padding-right: 1em;
 }
 pre#sql-query {

@@ -538,7 +564,7 @@ form input[type=submit], form button[type=button] {

 form input[type=submit] {
     color: #fff;
-    background-color: #007bff;
+    background: linear-gradient(180deg, #007bff 0%, #4E79C7 100%);
     border-color: #007bff;
     -webkit-appearance: button;
 }

@@ -821,6 +847,13 @@ svg.dropdown-menu-icon {
 .dropdown-menu a:hover {
     background-color: #eee;
 }
+.dropdown-menu .dropdown-description {
+    margin: 0;
+    color: #666;
+    font-size: 0.8em;
+    max-width: 80vw;
+    white-space: normal;
+}
 .dropdown-menu .hook {
     display: block;
     position: absolute;
@@ -0,0 +1,210 @@
// Custom events for use with the native CustomEvent API
const DATASETTE_EVENTS = {
  INIT: "datasette_init", // returns datasette manager instance in evt.detail
};

// Datasette "core" -> Methods/APIs that are foundational
// Plugins will have greater stability if they use the functional hooks- but if they do decide to hook into
// literal DOM selectors, they'll have an easier time using these addresses.
const DOM_SELECTORS = {
  /** Should have one match */
  jsonExportLink: ".export-links a[href*=json]",

  /** Event listeners that go outside of the main table, e.g. existing scroll listener */
  tableWrapper: ".table-wrapper",
  table: "table.rows-and-columns",
  aboveTablePanel: ".above-table-panel",

  // These could have multiple matches
  /** Used for selecting table headers. Use makeColumnActions if you want to add menu items. */
  tableHeaders: `table.rows-and-columns th`,

  /** Used to add "where" clauses to query using direct manipulation */
  filterRows: ".filter-row",
  /** Used to show top available enum values for a column ("facets") */
  facetResults: ".facet-results [data-column]",
};

/**
 * Monolith class for interacting with Datasette JS API
 * Imported with DEFER, runs after main document parsed
 * For now, manually synced with datasette/version.py
 */
const datasetteManager = {
  VERSION: window.datasetteVersion,

  // TODO: Should order of registration matter more?

  // Should plugins be allowed to clobber others or is it last-in takes priority?
  // Does pluginMetadata need to be serializable, or can we let it be stateful / have functions?
  plugins: new Map(),

  registerPlugin: (name, pluginMetadata) => {
    if (datasetteManager.plugins.has(name)) {
      console.warn(`Warning -> plugin ${name} was redefined`);
    }
    datasetteManager.plugins.set(name, pluginMetadata);

    // If the plugin participates in the panel... update the panel.
    if (pluginMetadata.makeAboveTablePanelConfigs) {
      datasetteManager.renderAboveTablePanel();
    }
  },

  /**
   * New DOM elements are created on each click, so the data is not stale.
   *
   * Items
   *  - must provide label (text)
   *  - might provide href (string) or an onclick ((evt) => void)
   *
   * columnMeta is metadata stored on the column header (TH) as a DOMStringMap
   *  - column: string
   *  - columnNotNull: boolean
   *  - columnType: sqlite datatype enum (text, number, etc)
   *  - isPk: boolean
   */
  makeColumnActions: (columnMeta) => {
    let columnActions = [];

    // Accept function that returns list of columnActions with keys
    // Required: label (text)
    // Optional: onClick or href
    datasetteManager.plugins.forEach((plugin) => {
      if (plugin.makeColumnActions) {
        // Plugins can provide multiple columnActions if they want
        // If multiple try to create entry with same label, the last one deletes the others
        columnActions.push(...plugin.makeColumnActions(columnMeta));
      }
    });

    // TODO: Validate columnAction configs and give informative error message if missing keys.
    return columnActions;
  },

  /**
   * In MVP, each plugin can only have 1 instance.
   * In future, panels could be repeated. We omit that for now since so many plugins depend on
   * shared URL state, so having multiple instances of plugin at same time is problematic.
   * Currently, we never destroy any panels, we just hide them.
   *
   * TODO: nicer panel css, show panel selection state.
   * TODO: does this hook need to take any arguments?
   */
  renderAboveTablePanel: () => {
    const aboveTablePanel = document.querySelector(
      DOM_SELECTORS.aboveTablePanel
    );

    if (!aboveTablePanel) {
      console.warn(
        "This page does not have a table, the renderAboveTablePanel cannot be used."
      );
      return;
    }

    let aboveTablePanelWrapper = aboveTablePanel.querySelector(".panels");

    // First render: create wrappers. Otherwise, reuse previous.
    if (!aboveTablePanelWrapper) {
      aboveTablePanelWrapper = document.createElement("div");
      aboveTablePanelWrapper.classList.add("tab-contents");
      const panelNav = document.createElement("div");
      panelNav.classList.add("tab-controls");

      // Temporary: css for minimal amount of breathing room.
      panelNav.style.display = "flex";
      panelNav.style.gap = "8px";
      panelNav.style.marginTop = "4px";
      panelNav.style.marginBottom = "20px";

      aboveTablePanel.appendChild(panelNav);
      aboveTablePanel.appendChild(aboveTablePanelWrapper);
    }

    datasetteManager.plugins.forEach((plugin, pluginName) => {
      const { makeAboveTablePanelConfigs } = plugin;

      if (makeAboveTablePanelConfigs) {
        const controls = aboveTablePanel.querySelector(".tab-controls");
        const contents = aboveTablePanel.querySelector(".tab-contents");

        // Each plugin can make multiple panels
        const configs = makeAboveTablePanelConfigs();

        configs.forEach((config, i) => {
          const nodeContentId = `${pluginName}_${config.id}_panel-content`;

          // quit if we've already registered this plugin
          // TODO: look into whether plugins should be allowed to ask
          // parent to re-render, or if they should manage that internally.
          if (document.getElementById(nodeContentId)) {
            return;
          }

          // Add tab control button
          const pluginControl = document.createElement("button");
          pluginControl.textContent = config.label;
          pluginControl.onclick = () => {
            contents.childNodes.forEach((node) => {
              if (node.id === nodeContentId) {
                node.style.display = "block";
              } else {
                node.style.display = "none";
              }
            });
          };
          controls.appendChild(pluginControl);

          // Add plugin content area
          const pluginNode = document.createElement("div");
          pluginNode.id = nodeContentId;
          config.render(pluginNode);
          pluginNode.style.display = "none"; // Default to hidden unless you're first

          contents.appendChild(pluginNode);
        });

        // Let first node be selected by default
        if (contents.childNodes.length) {
          contents.childNodes[0].style.display = "block";
        }
      }
    });
  },

  /** Selectors for document (DOM) elements. Store identifier instead of immediate references in case they haven't loaded when Manager starts. */
  selectors: DOM_SELECTORS,

  // Future API ideas
  // Fetch page's data in array, and cache so plugins could reuse it
  // Provide knowledge of what datasette JS or server-side via traditional console autocomplete
  // State helpers: URL params https://github.com/simonw/datasette/issues/1144 and localstorage
  // UI Hooks: command + k, tab manager hook
  // Should we notify plugins that have dependencies
  // when all dependencies were fulfilled? (leaflet, codemirror, etc)
  // https://github.com/simonw/datasette-leaflet -> this way
  // multiple plugins can all request the same copy of leaflet.
};

const initializeDatasette = () => {
  // Hide the global behind __ prefix. Ideally they should be listening for the
  // DATASETTE_EVENTS.INIT event to avoid the habit of reading from the window.

  window.__DATASETTE__ = datasetteManager;
  console.debug("Datasette Manager Created!");

  const initDatasetteEvent = new CustomEvent(DATASETTE_EVENTS.INIT, {
    detail: datasetteManager,
  });

  document.dispatchEvent(initDatasetteEvent);
};

/**
 * Main function
 * Fires AFTER the document has been parsed
 */
document.addEventListener("DOMContentLoaded", function () {
  initializeDatasette();
});
@@ -17,7 +17,8 @@ var DROPDOWN_ICON_SVG = `<svg xmlns="http://www.w3.org/2000/svg" width="14" heig
   <path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"></path>
 </svg>`;

-(function () {
+/** Main initialization function for Datasette Table interactions */
+const initDatasetteTable = function (manager) {
   // Feature detection
   if (!window.URLSearchParams) {
     return;

@@ -68,13 +69,11 @@ var DROPDOWN_ICON_SVG = `<svg xmlns="http://www.w3.org/2000/svg" width="14" heig
     menu.style.display = "none";
     menu.classList.remove("anim-scale-in");
   }
-  // When page loads, add scroll listener on .table-wrapper
-  document.addEventListener("DOMContentLoaded", () => {
-    var tableWrapper = document.querySelector(".table-wrapper");
-    if (tableWrapper) {
-      tableWrapper.addEventListener("scroll", closeMenu);
-    }
-  });
-
+  const tableWrapper = document.querySelector(manager.selectors.tableWrapper);
+  if (tableWrapper) {
+    tableWrapper.addEventListener("scroll", closeMenu);
+  }
   document.body.addEventListener("click", (ev) => {
     /* was this click outside the menu? */
     var target = ev.target;

@@ -85,9 +84,11 @@ var DROPDOWN_ICON_SVG = `<svg xmlns="http://www.w3.org/2000/svg" width="14" heig
       closeMenu();
     }
   });
-  function iconClicked(ev) {
+
+  function onTableHeaderClick(ev) {
     ev.preventDefault();
     ev.stopPropagation();
     menu.innerHTML = DROPDOWN_HTML;
     var th = ev.target;
     while (th.nodeName != "TH") {
       th = th.parentNode;

@@ -185,7 +186,59 @@ var DROPDOWN_ICON_SVG = `<svg xmlns="http://www.w3.org/2000/svg" width="14" heig
     menu.style.left = menuLeft + "px";
     menu.style.display = "block";
     menu.classList.add("anim-scale-in");
+
+    // Custom menu items on each render
+    // Plugin hook: allow adding JS-based additional menu items
+    const columnActionsPayload = {
+      columnName: th.dataset.column,
+      columnNotNull: th.dataset.columnNotNull === '1',
+      columnType: th.dataset.columnType,
+      isPk: th.dataset.isPk === '1'
+    };
+    const columnItemConfigs = manager.makeColumnActions(columnActionsPayload);
+
+    const menuList = menu.querySelector('ul');
+    columnItemConfigs.forEach(itemConfig => {
+      // Remove items from previous render. We assume entries have unique labels.
+      const existingItems = menuList.querySelectorAll(`li`);
+      Array.from(existingItems).filter(item => item.innerText === itemConfig.label).forEach(node => {
+        node.remove();
+      });
+
+      const newLink = document.createElement('a');
+      newLink.textContent = itemConfig.label;
+      newLink.href = itemConfig.href ?? '#';
+      if (itemConfig.onClick) {
+        newLink.onclick = itemConfig.onClick;
+      }
+
+      // Attach new elements to DOM
+      const menuItem = document.createElement('li');
+      menuItem.appendChild(newLink);
+      menuList.appendChild(menuItem);
+    });
+
+    // Measure width of menu and adjust position if too far right
+    const menuWidth = menu.offsetWidth;
+    const windowWidth = window.innerWidth;
+    if (menuLeft + menuWidth > windowWidth) {
+      menu.style.left = windowWidth - menuWidth - 20 + "px";
+    }
+    // Align menu .hook arrow with the column cog icon
+    const hook = menu.querySelector('.hook');
+    const icon = th.querySelector('.dropdown-menu-icon');
+    const iconRect = icon.getBoundingClientRect();
+    const hookLeft = (iconRect.left - menuLeft + 1) + 'px';
+    hook.style.left = hookLeft;
+    // Move the whole menu right if the hook is too far right
+    const menuRect = menu.getBoundingClientRect();
+    if (iconRect.right > menuRect.right) {
+      menu.style.left = (iconRect.right - menuWidth) + 'px';
+      // And move hook tip as well
+      hook.style.left = (menuWidth - 13) + 'px';
+    }
   }

   var svg = document.createElement("div");
   svg.innerHTML = DROPDOWN_ICON_SVG;
   svg = svg.querySelector("*");

@@ -197,21 +250,21 @@ var DROPDOWN_ICON_SVG = `<svg xmlns="http://www.w3.org/2000/svg" width="14" heig
   menu.style.display = "none";
   document.body.appendChild(menu);

-  var ths = Array.from(document.querySelectorAll(".rows-and-columns th"));
+  var ths = Array.from(document.querySelectorAll(manager.selectors.tableHeaders));
   ths.forEach((th) => {
     if (!th.querySelector("a")) {
       return;
     }
     var icon = svg.cloneNode(true);
-    icon.addEventListener("click", iconClicked);
+    icon.addEventListener("click", onTableHeaderClick);
     th.appendChild(icon);
   });
-})();
+};

 /* Add x buttons to the filter rows */
-(function () {
+function addButtonsToFilterRows(manager) {
   var x = "✖";
-  var rows = Array.from(document.querySelectorAll(".filter-row")).filter((el) =>
+  var rows = Array.from(document.querySelectorAll(manager.selectors.filterRow)).filter((el) =>
     el.querySelector(".filter-op")
   );
   rows.forEach((row) => {

@@ -234,13 +287,13 @@ var DROPDOWN_ICON_SVG = `<svg xmlns="http://www.w3.org/2000/svg" width="14" heig
       a.style.display = "none";
     }
   });
-})();
+};

 /* Set up datalist autocomplete for filter values */
-(function () {
+function initAutocompleteForFilterValues(manager) {
   function createDataLists() {
     var facetResults = document.querySelectorAll(
-      ".facet-results [data-column]"
+      manager.selectors.facetResults
     );
     Array.from(facetResults).forEach(function (facetResult) {
       // Use link text from all links in the facet result

@@ -266,9 +319,21 @@ var DROPDOWN_ICON_SVG = `<svg xmlns="http://www.w3.org/2000/svg" width="14" heig
   document.body.addEventListener("change", function (event) {
     if (event.target.name === "_filter_column") {
       event.target
-        .closest(".filter-row")
+        .closest(manager.selectors.filterRow)
         .querySelector(".filter-value")
         .setAttribute("list", "datalist-" + event.target.value);
     }
   });
-})();
+};
+
+// Ensures Table UI is initialized only after the Manager is ready.
+document.addEventListener("datasette_init", function (evt) {
+  const { detail: manager } = evt;
+
+  // Main table
+  initDatasetteTable(manager);
+
+  // Other UI functions with interactive JS needs
+  addButtonsToFilterRows(manager);
+  initAutocompleteForFilterValues(manager);
+});
@@ -0,0 +1,28 @@
{% if action_links %}
<div class="page-action-menu">
  <details class="actions-menu-links details-menu">
    <summary>
      <div class="icon-text">
        <svg class="icon" aria-labelledby="actions-menu-links-title" role="img" style="color: #fff" xmlns="http://www.w3.org/2000/svg" width="28" height="28" viewBox="0 0 28 28" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
          <title id="actions-menu-links-title">{{ action_title }}</title>
          <circle cx="12" cy="12" r="3"></circle>
          <path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"></path>
        </svg>
        <span>{{ action_title }}</span>
      </div>
    </summary>
    <div class="dropdown-menu">
      <div class="hook"></div>
      <ul>
        {% for link in action_links %}
        <li><a href="{{ link.href }}">{{ link.label }}
          {% if link.description %}
          <p class="dropdown-description">{{ link.description }}</p>
          {% endif %}</a>
        </li>
        {% endfor %}
      </ul>
    </div>
  </details>
</div>
{% endif %}
@@ -1,3 +1,5 @@
+<!-- above-table-panel is a hook node for plugins to attach to. Displays even if no data is available -->
+<div class="above-table-panel"> </div>
{% if display_rows %}
<div class="table-wrapper">
  <table class="rows-and-columns">
@@ -8,7 +8,7 @@

{% block content %}

-<h1>API Explorer</h1>
+<h1>API Explorer{% if private %} 🔒{% endif %}</h1>

<p>Use this tool to try out the
{% if datasette_version %}
@@ -7,6 +7,8 @@
{% for url in extra_css_urls %}
  <link rel="stylesheet" href="{{ url.url }}"{% if url.get("sri") %} integrity="{{ url.sri }}" crossorigin="anonymous"{% endif %}>
{% endfor %}
+<script>window.datasetteVersion = '{{ datasette_version }}';</script>
+<script src="{{ urls.static('datasette-manager.js') }}" defer></script>
{% for url in extra_js_urls %}
  <script {% if url.module %}type="module" {% endif %}src="{{ url.url }}"{% if url.get("sri") %} integrity="{{ url.sri }}" crossorigin="anonymous"{% endif %}></script>
{% endfor %}

@@ -35,7 +37,7 @@
  </ul>
{% endif %}
{% if show_logout %}
-<form action="{{ urls.logout() }}" method="post">
+<form class="nav-menu-logout" action="{{ urls.logout() }}" method="post">
  <input type="hidden" name="csrftoken" value="{{ csrftoken() }}">
  <button class="button-as-link">Log out</button>
</form>{% endif %}
@@ -10,29 +10,13 @@
{% block body_class %}db db-{{ database|to_css_class }}{% endblock %}

{% block content %}
-<div class="page-header" style="border-color: #{{ database_color(database) }}">
+<div class="page-header" style="border-color: #{{ database_color }}">
  <h1>{{ metadata.title or database }}{% if private %} 🔒{% endif %}</h1>
-  {% set links = database_actions() %}{% if links %}
-  <details class="actions-menu-links details-menu">
-    <summary><svg aria-labelledby="actions-menu-links-title" role="img"
-        style="color: #666" xmlns="http://www.w3.org/2000/svg"
-        width="28" height="28" viewBox="0 0 24 24" fill="none"
-        stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
-      <title id="actions-menu-links-title">Table actions</title>
-      <circle cx="12" cy="12" r="3"></circle>
-      <path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"></path>
-    </svg></summary>
-    <div class="dropdown-menu">
-      {% if links %}
-      <ul>
-        {% for link in links %}
-        <li><a href="{{ link.href }}">{{ link.label }}</a></li>
-        {% endfor %}
-      </ul>
-      {% endif %}
-    </div>
-  </details>{% endif %}
-</div>
+</div>
+{% set action_links, action_title = database_actions(), "Database actions" %}
+{% include "_action_menu.html" %}

{{ top_database() }}

{% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %}

@@ -95,7 +79,7 @@
{% endif %}

{% if allow_download %}
-<p class="download-sqlite">Download SQLite DB: <a href="{{ urls.database(database) }}.db">{{ database }}.db</a> <em>{{ format_bytes(size) }}</em></p>
+<p class="download-sqlite">Download SQLite DB: <a href="{{ urls.database(database) }}.db" rel="nofollow">{{ database }}.db</a> <em>{{ format_bytes(size) }}</em></p>
{% endif %}

{% include "_codemirror_foot.html" %}
@@ -7,6 +7,11 @@
{% block content %}
<h1>{{ metadata.title or "Datasette" }}{% if private %} 🔒{% endif %}</h1>

+{% set action_links, action_title = homepage_actions, "Homepage actions" %}
+{% include "_action_menu.html" %}
+
+{{ top_homepage() }}
+
{% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %}

{% for database in databases %}
@@ -26,7 +26,7 @@
  <li><a href="/-/plugins">Installed plugins</a></li>
  <li><a href="/-/versions">Version info</a></li>
</ul>
-<form action="/-/logout" method="post">
+<form class="nav-menu-logout" action="/-/logout" method="post">
  <button class="button-as-link">Log out</button>
</form>
</div>
@@ -96,18 +96,24 @@
<section class="content">
<div class="page-header" style="border-color: #ff0000">
  <h1>fixtures</h1>
</div>
+<div class="page-action-menu">
<details class="actions-menu-links details-menu">
-  <summary><svg aria-labelledby="actions-menu-links-title" role="img"
-      style="color: #666" xmlns="http://www.w3.org/2000/svg"
-      width="28" height="28" viewBox="0 0 24 24" fill="none"
-      stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
-    <title id="actions-menu-links-title">Table actions</title>
-    <circle cx="12" cy="12" r="3"></circle>
-    <path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"></path>
-  </svg></summary>
+  <summary>
+    <div class="icon-text">
+      <svg class="icon" aria-labelledby="actions-menu-links-title" role="img" style="color: #fff" xmlns="http://www.w3.org/2000/svg" width="28" height="28" viewBox="0 0 28 28" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
+        <title id="actions-menu-links-title">Database actions</title>
+        <circle cx="12" cy="12" r="3"></circle>
+        <path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"></path>
+      </svg>
+      <span>Database actions</span>
+    </div>
+  </summary>
  <div class="dropdown-menu">
+    <div class="hook"></div>
    <ul>
      <li><a href="#">Database action</a></li>
      <li><a href="#">Action one</a></li>
      <li><a href="#">Action two</a></li>
    </ul>
  </div>
</details>
@@ -158,18 +164,24 @@
<section class="content">
<div class="page-header" style="border-color: #ff0000">
  <h1>roadside_attraction_characteristics</h1>
</div>
+<div class="page-action-menu">
<details class="actions-menu-links details-menu">
-  <summary><svg aria-labelledby="actions-menu-links-title" role="img"
-      style="color: #666" xmlns="http://www.w3.org/2000/svg"
-      width="28" height="28" viewBox="0 0 24 24" fill="none"
-      stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
-    <title id="actions-menu-links-title">Table actions</title>
-    <circle cx="12" cy="12" r="3"></circle>
-    <path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"></path>
-  </svg></summary>
+  <summary>
+    <div class="icon-text">
+      <svg class="icon" aria-labelledby="actions-menu-links-title" role="img" style="color: #fff" xmlns="http://www.w3.org/2000/svg" width="28" height="28" viewBox="0 0 28 28" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
+        <title id="actions-menu-links-title">Database actions</title>
+        <circle cx="12" cy="12" r="3"></circle>
+        <path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"></path>
+      </svg>
+      <span>Table actions</span>
+    </div>
+  </summary>
  <div class="dropdown-menu">
+    <div class="hook"></div>
    <ul>
      <li><a href="#">Table action</a></li>
      <li><a href="#">Action one</a></li>
      <li><a href="#">Action two</a></li>
    </ul>
  </div>
</details>
@@ -57,7 +57,7 @@ textarea {
<p><label for="permission" style="display:block">Permission</label>
<select name="permission" id="permission">
  {% for permission in permissions %}
-  <option value="{{ permission.0 }}">{{ permission.name }} (default {{ permission.default }})</option>
+  <option value="{{ permission.name }}">{{ permission.name }} (default {{ permission.default }})</option>
  {% endfor %}
</select>
<p><label for="resource_1">Database name</label><input type="text" id="resource_1" name="resource_1"></p>

@@ -71,19 +71,19 @@ textarea {

<script>
var rawPerms = {{ permissions|tojson }};
-var permissions = Object.fromEntries(rawPerms.map(([label, abbr, needs_resource_1, needs_resource_2, def]) => [label, {needs_resource_1, needs_resource_2, def}]))
+var permissions = Object.fromEntries(rawPerms.map(p => [p.name, p]));
var permissionSelect = document.getElementById('permission');
var resource1 = document.getElementById('resource_1');
var resource2 = document.getElementById('resource_2');
function updateResourceVisibility() {
  var permission = permissionSelect.value;
- var {needs_resource_1, needs_resource_2} = permissions[permission];
- if (needs_resource_1) {
+ var {takes_database, takes_resource} = permissions[permission];
+ if (takes_database) {
    resource1.closest('p').style.display = 'block';
  } else {
    resource1.closest('p').style.display = 'none';
  }
- if (needs_resource_2) {
+ if (takes_resource) {
    resource2.closest('p').style.display = 'block';
  } else {
    resource2.closest('p').style.display = 'none';
@@ -28,7 +28,11 @@
<p class="message-error">This query cannot be executed because the database is immutable.</p>
{% endif %}

-<h1 style="padding-left: 10px; border-left: 10px solid #{{ database_color(database) }}">{{ metadata.title or database }}{% if canned_query and not metadata.title %}: {{ canned_query }}{% endif %}{% if private %} 🔒{% endif %}</h1>
+<h1 style="padding-left: 10px; border-left: 10px solid #{{ database_color }}">{{ metadata.title or database }}{% if canned_query and not metadata.title %}: {{ canned_query }}{% endif %}{% if private %} 🔒{% endif %}</h1>
+{% set action_links, action_title = query_actions(), "Query actions" %}
+{% include "_action_menu.html" %}
+
+{% if canned_query %}{{ top_canned_query() }}{% else %}{{ top_query() }}{% endif %}

{% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %}
@@ -20,7 +20,12 @@
{% endblock %}

{% block content %}
-<h1 style="padding-left: 10px; border-left: 10px solid #{{ database_color(database) }}">{{ table }}: {{ ', '.join(primary_key_values) }}{% if private %} 🔒{% endif %}</h1>
+<h1 style="padding-left: 10px; border-left: 10px solid #{{ database_color }}">{{ table }}: {{ ', '.join(primary_key_values) }}{% if private %} 🔒{% endif %}</h1>
+
+{% set action_links, action_title = row_actions, "Row actions" %}
+{% include "_action_menu.html" %}
+
+{{ top_row() }}

{% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %}
@@ -21,29 +21,13 @@
{% endblock %}

{% block content %}
-<div class="page-header" style="border-color: #{{ database_color(database) }}">
+<div class="page-header" style="border-color: #{{ database_color }}">
  <h1>{{ metadata.get("title") or table }}{% if is_view %} (view){% endif %}{% if private %} 🔒{% endif %}</h1>
-  {% set links = table_actions() %}{% if links %}
-  <details class="actions-menu-links details-menu">
-    <summary><svg aria-labelledby="actions-menu-links-title" role="img"
-        style="color: #666" xmlns="http://www.w3.org/2000/svg"
-        width="28" height="28" viewBox="0 0 24 24" fill="none"
-        stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
-      <title id="actions-menu-links-title">Table actions</title>
-      <circle cx="12" cy="12" r="3"></circle>
-      <path d="M19.4 15a1.65 1.65 0 0 0 .33 1.82l.06.06a2 2 0 0 1 0 2.83 2 2 0 0 1-2.83 0l-.06-.06a1.65 1.65 0 0 0-1.82-.33 1.65 1.65 0 0 0-1 1.51V21a2 2 0 0 1-2 2 2 2 0 0 1-2-2v-.09A1.65 1.65 0 0 0 9 19.4a1.65 1.65 0 0 0-1.82.33l-.06.06a2 2 0 0 1-2.83 0 2 2 0 0 1 0-2.83l.06-.06a1.65 1.65 0 0 0 .33-1.82 1.65 1.65 0 0 0-1.51-1H3a2 2 0 0 1-2-2 2 2 0 0 1 2-2h.09A1.65 1.65 0 0 0 4.6 9a1.65 1.65 0 0 0-.33-1.82l-.06-.06a2 2 0 0 1 0-2.83 2 2 0 0 1 2.83 0l.06.06a1.65 1.65 0 0 0 1.82.33H9a1.65 1.65 0 0 0 1-1.51V3a2 2 0 0 1 2-2 2 2 0 0 1 2 2v.09a1.65 1.65 0 0 0 1 1.51 1.65 1.65 0 0 0 1.82-.33l.06-.06a2 2 0 0 1 2.83 0 2 2 0 0 1 0 2.83l-.06.06a1.65 1.65 0 0 0-.33 1.82V9a1.65 1.65 0 0 0 1.51 1H21a2 2 0 0 1 2 2 2 2 0 0 1-2 2h-.09a1.65 1.65 0 0 0-1.51 1z"></path>
-    </svg></summary>
-    <div class="dropdown-menu">
-      {% if links %}
-      <ul>
-        {% for link in links %}
-        <li><a href="{{ link.href }}">{{ link.label }}</a></li>
-        {% endfor %}
-      </ul>
-      {% endif %}
-    </div>
-  </details>{% endif %}
</div>
+{% set action_links, action_title = actions(), "View actions" if is_view else "Table actions" %}
+{% include "_action_menu.html" %}

{{ top_table() }}

{% block description_source_license %}{% include "_description_source_license.html" %}{% endblock %}
@@ -1,4 +1,4 @@
-from .utils import tilde_encode, path_with_format, HASH_LENGTH, PrefixedUrlString
+from .utils import tilde_encode, path_with_format, PrefixedUrlString
import urllib
@@ -2,6 +2,7 @@ import asyncio
from contextlib import contextmanager
+import click
from collections import OrderedDict, namedtuple, Counter
import copy
import base64
import hashlib
import inspect

@@ -17,11 +18,14 @@ import time
import types
import secrets
import shutil
from typing import Iterable, List, Tuple
import urllib
import yaml
from .shutil_backport import copytree
from .sqlite import sqlite3, supports_table_xinfo

+if typing.TYPE_CHECKING:
+    from datasette.database import Database
+
# From https://www.sqlite.org/lang_keywords.html
reserved_words = set(

@@ -242,6 +246,7 @@ allowed_pragmas = (
    "schema_version",
    "table_info",
    "table_xinfo",
+   "table_list",
)
disallawed_sql_res = [
    (

@@ -402,9 +407,9 @@ def make_dockerfile(
    apt_get_extras = apt_get_extras_
    if spatialite:
        apt_get_extras.extend(["python3-dev", "gcc", "libsqlite3-mod-spatialite"])
-        environment_variables[
-            "SQLITE_EXTENSIONS"
-        ] = "/usr/lib/x86_64-linux-gnu/mod_spatialite.so"
+        environment_variables["SQLITE_EXTENSIONS"] = (
+            "/usr/lib/x86_64-linux-gnu/mod_spatialite.so"
+        )
    return """
FROM python:3.11.0-slim-bullseye
COPY . /app

@@ -416,9 +421,11 @@ RUN datasette inspect {files} --inspect-file inspect-data.json
ENV PORT {port}
EXPOSE {port}
CMD {cmd}""".format(
-        apt_get_extras=APT_GET_DOCKERFILE_EXTRAS.format(" ".join(apt_get_extras))
-        if apt_get_extras
-        else "",
+        apt_get_extras=(
+            APT_GET_DOCKERFILE_EXTRAS.format(" ".join(apt_get_extras))
+            if apt_get_extras
+            else ""
+        ),
        environment_variables="\n".join(
            [
                "ENV {} '{}'".format(key, value)

@@ -709,7 +716,7 @@ def to_css_class(s):
    """
    if css_class_re.match(s):
        return s
-    md5_suffix = hashlib.md5(s.encode("utf8")).hexdigest()[:6]
+    md5_suffix = md5_not_usedforsecurity(s)[:6]
    # Strip leading _, -
    s = s.lstrip("_").lstrip("-")
    # Replace any whitespace with hyphens

@@ -1126,7 +1133,13 @@ class StartupError(Exception):
_re_named_parameter = re.compile(":([a-zA-Z0-9_]+)")


-async def derive_named_parameters(db, sql):
+@documented
+async def derive_named_parameters(db: "Database", sql: str) -> List[str]:
+    """
+    Given a SQL statement, return a list of named parameters that are used in the statement
+
+    e.g. for ``select * from foo where id=:id`` this would return ``["id"]``
+    """
    explain = "explain {}".format(sql.strip().rstrip(";"))
    possible_params = _re_named_parameter.findall(sql)
    try:

@@ -1219,3 +1232,189 @@ async def row_sql_params_pks(db, table, pk_values):
    for i, pk_value in enumerate(pk_values):
        params[f"p{i}"] = pk_value
    return sql, params, pks
+
+
+def _handle_pair(key: str, value: str) -> dict:
+    """
+    Turn a key-value pair into a nested dictionary.
+    foo, bar => {'foo': 'bar'}
+    foo.bar, baz => {'foo': {'bar': 'baz'}}
+    foo.bar, [1, 2, 3] => {'foo': {'bar': [1, 2, 3]}}
+    foo.bar, "baz" => {'foo': {'bar': 'baz'}}
+    foo.bar, '{"baz": "qux"}' => {'foo': {'bar': "{'baz': 'qux'}"}}
+    """
+    try:
+        value = json.loads(value)
+    except json.JSONDecodeError:
+        # If it doesn't parse as JSON, treat it as a string
+        pass
+
+    keys = key.split(".")
+    result = current_dict = {}
+
+    for k in keys[:-1]:
+        current_dict[k] = {}
+        current_dict = current_dict[k]
+
+    current_dict[keys[-1]] = value
+    return result
+
+
+def _combine(base: dict, update: dict) -> dict:
+    """
+    Recursively merge two dictionaries.
+    """
+    for key, value in update.items():
+        if isinstance(value, dict) and key in base and isinstance(base[key], dict):
+            base[key] = _combine(base[key], value)
+        else:
+            base[key] = value
+    return base
+
+
+def pairs_to_nested_config(pairs: typing.List[typing.Tuple[str, typing.Any]]) -> dict:
+    """
+    Parse a list of key-value pairs into a nested dictionary.
+    """
+    result = {}
+    for key, value in pairs:
+        parsed_pair = _handle_pair(key, value)
+        result = _combine(result, parsed_pair)
+    return result
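Taken together, these three new helpers turn dotted key-value pairs into a nested configuration dictionary, JSON-parsing values where possible. A short sketch of the expected behavior; the import only resolves on a checkout that includes this branch, and the plugin name is made up:

```python
# Sketch: dotted key-value pairs become nested config (assumes a checkout
# that includes the new pairs_to_nested_config helper above).
from datasette.utils import pairs_to_nested_config

pairs = [
    ("settings.sql_time_limit_ms", "5000"),       # json.loads -> int 5000
    ("plugins.datasette-cluster.memory", "128"),  # hypothetical plugin name
    ("about", "My instance"),                     # JSON parse fails -> kept as string
]
config = pairs_to_nested_config(pairs)
print(config)
# {'settings': {'sql_time_limit_ms': 5000},
#  'plugins': {'datasette-cluster': {'memory': 128}},
#  'about': 'My instance'}
```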
+
+
+def make_slot_function(name, datasette, request, **kwargs):
+    from datasette.plugins import pm
+
+    method = getattr(pm.hook, name, None)
+    assert method is not None, "No hook found for {}".format(name)
+
+    async def inner():
+        html_bits = []
+        for hook in method(datasette=datasette, request=request, **kwargs):
+            html = await await_me_maybe(hook)
+            if html is not None:
+                html_bits.append(html)
+        return markupsafe.Markup("".join(html_bits))
+
+    return inner
+
+
+def prune_empty_dicts(d: dict):
+    """
+    Recursively prune all empty dictionaries from a given dictionary.
+    """
+    for key, value in list(d.items()):
+        if isinstance(value, dict):
+            prune_empty_dicts(value)
+            if value == {}:
+                d.pop(key, None)
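A minimal illustration of `prune_empty_dicts`: it mutates its argument in place, and a dict that only becomes empty after its own children are pruned is removed as well:

```python
# Assumes a checkout that includes the prune_empty_dicts helper above.
from datasette.utils import prune_empty_dicts

d = {"a": {"b": {}}, "c": 1, "d": {}}
prune_empty_dicts(d)
# "b" is empty so it is dropped, which leaves "a" empty, so "a" is dropped
# too; "d" is dropped directly. Non-dict values are untouched.
assert d == {"c": 1}
```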
+
+
+def move_plugins_and_allow(source: dict, destination: dict) -> Tuple[dict, dict]:
+    """
+    Move 'plugins' and 'allow' keys from source to destination dictionary. Creates
+    hierarchy in destination if needed. After moving, recursively remove any keys
+    in the source that are left empty.
+    """
+    source = copy.deepcopy(source)
+    destination = copy.deepcopy(destination)
+
+    def recursive_move(src, dest, path=None):
+        if path is None:
+            path = []
+        for key, value in list(src.items()):
+            new_path = path + [key]
+            if key in ("plugins", "allow"):
+                # Navigate and create the hierarchy in destination if needed
+                d = dest
+                for step in path:
+                    d = d.setdefault(step, {})
+                # Move the plugins
+                d[key] = value
+                # Remove the plugins from source
+                src.pop(key, None)
+            elif isinstance(value, dict):
+                recursive_move(value, dest, new_path)
+                # After moving, check if the current dictionary is empty and remove it if so
+                if not value:
+                    src.pop(key, None)
+
+    recursive_move(source, destination)
+    prune_empty_dicts(source)
+    return source, destination
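A hedged sketch of the metadata-to-config migration this helper enables, with made-up input (the plugin name is hypothetical; the helper itself is the one defined above):

```python
# Assumes a checkout with the move_plugins_and_allow helper above.
from datasette.utils import move_plugins_and_allow

metadata = {
    "title": "My site",
    "databases": {"fixtures": {"allow": {"id": "root"}, "source": "tests"}},
    "plugins": {"datasette-hello": {"greeting": "hi"}},  # hypothetical plugin
}
metadata, config = move_plugins_and_allow(metadata, {})
# "plugins" and "allow" move over, with their hierarchy preserved:
assert config == {
    "databases": {"fixtures": {"allow": {"id": "root"}}},
    "plugins": {"datasette-hello": {"greeting": "hi"}},
}
# Everything else stays behind in metadata:
assert metadata == {
    "title": "My site",
    "databases": {"fixtures": {"source": "tests"}},
}
```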
+
+
+_table_config_keys = (
+    "hidden",
+    "sort",
+    "sort_desc",
+    "size",
+    "sortable_columns",
+    "label_column",
+    "facets",
+    "fts_table",
+    "fts_pk",
+    "searchmode",
+    "units",
+)
+
+
+def move_table_config(metadata: dict, config: dict):
+    """
+    Move all known table configuration keys from metadata to config.
+    """
+    if "databases" not in metadata:
+        return metadata, config
+    metadata = copy.deepcopy(metadata)
+    config = copy.deepcopy(config)
+    for database_name, database in metadata["databases"].items():
+        if "tables" not in database:
+            continue
+        for table_name, table in database["tables"].items():
+            for key in _table_config_keys:
+                if key in table:
+                    config.setdefault("databases", {}).setdefault(
+                        database_name, {}
+                    ).setdefault("tables", {}).setdefault(table_name, {})[
+                        key
+                    ] = table.pop(key)
+    prune_empty_dicts(metadata)
+    return metadata, config
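A quick sketch of `move_table_config` on hypothetical input: only keys listed in `_table_config_keys` migrate, everything else stays in metadata:

```python
# Assumes a checkout with the move_table_config helper above.
from datasette.utils import move_table_config

metadata = {
    "databases": {
        "fixtures": {
            "tables": {
                "facetable": {
                    "description": "A table",  # stays: not a config key
                    "sort": "pk",              # moves: in _table_config_keys
                    "facets": ["planet_int"],  # moves
                }
            }
        }
    }
}
metadata, config = move_table_config(metadata, {})
assert config["databases"]["fixtures"]["tables"]["facetable"] == {
    "sort": "pk",
    "facets": ["planet_int"],
}
assert metadata["databases"]["fixtures"]["tables"]["facetable"] == {
    "description": "A table"
}
```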
+
+
+def redact_keys(original: dict, key_patterns: Iterable) -> dict:
+    """
+    Recursively redact sensitive keys in a dictionary based on given patterns
+
+    :param original: The original dictionary
+    :param key_patterns: A list of substring patterns to redact
+    :return: A copy of the original dictionary with sensitive values redacted
+    """
+
+    def redact(data):
+        if isinstance(data, dict):
+            return {
+                k: (
+                    redact(v)
+                    if not any(pattern in k for pattern in key_patterns)
+                    else "***"
+                )
+                for k, v in data.items()
+            }
+        elif isinstance(data, list):
+            return [redact(item) for item in data]
+        else:
+            return data
+
+    return redact(original)
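An illustration with made-up values: any key containing one of the substring patterns gets its value replaced by `"***"`, and lists of dicts are handled recursively:

```python
# Assumes a checkout with the redact_keys helper above.
from datasette.utils import redact_keys

config = {
    "plugins": {
        "datasette-auth": {  # hypothetical plugin name
            "client_secret": "abc123",
            "accounts": [{"api_token": "xyz", "label": "prod"}],
        }
    }
}
redacted = redact_keys(config, ("secret", "token"))
assert redacted["plugins"]["datasette-auth"]["client_secret"] == "***"
assert redacted["plugins"]["datasette-auth"]["accounts"][0] == {
    "api_token": "***",
    "label": "prod",
}
```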
+
+
+def md5_not_usedforsecurity(s):
+    try:
+        return hashlib.md5(s.encode("utf8"), usedforsecurity=False).hexdigest()
+    except TypeError:
+        # For Python 3.8, which does not support usedforsecurity=False
+        return hashlib.md5(s.encode("utf8")).hexdigest()
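The `usedforsecurity=False` flag (Python 3.9+) matters on FIPS-enabled systems, where a plain `hashlib.md5()` call can be rejected; the `TypeError` fallback keeps older Pythons working. A quick standalone check that the digest itself is unchanged:

```python
import hashlib

def md5_not_usedforsecurity(s):
    # usedforsecurity=False keeps MD5 usable on FIPS builds (Python 3.9+);
    # older Pythons raise TypeError for the kwarg, so fall back.
    try:
        return hashlib.md5(s.encode("utf8"), usedforsecurity=False).hexdigest()
    except TypeError:
        return hashlib.md5(s.encode("utf8")).hexdigest()

# Same digest either way; only the security annotation differs:
assert md5_not_usedforsecurity("fixtures") == hashlib.md5(b"fixtures").hexdigest()
```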
@@ -5,13 +5,13 @@ from datasette.utils import table_column_details
async def init_internal_db(db):
    create_tables_sql = textwrap.dedent(
        """
-    CREATE TABLE IF NOT EXISTS databases (
+    CREATE TABLE IF NOT EXISTS catalog_databases (
        database_name TEXT PRIMARY KEY,
        path TEXT,
        is_memory INTEGER,
        schema_version INTEGER
    );
-    CREATE TABLE IF NOT EXISTS tables (
+    CREATE TABLE IF NOT EXISTS catalog_tables (
        database_name TEXT,
        table_name TEXT,
        rootpage INTEGER,

@@ -19,7 +19,7 @@ async def init_internal_db(db):
        PRIMARY KEY (database_name, table_name),
        FOREIGN KEY (database_name) REFERENCES databases(database_name)
    );
-    CREATE TABLE IF NOT EXISTS columns (
+    CREATE TABLE IF NOT EXISTS catalog_columns (
        database_name TEXT,
        table_name TEXT,
        cid INTEGER,

@@ -33,7 +33,7 @@ async def init_internal_db(db):
        FOREIGN KEY (database_name) REFERENCES databases(database_name),
        FOREIGN KEY (database_name, table_name) REFERENCES tables(database_name, table_name)
    );
-    CREATE TABLE IF NOT EXISTS indexes (
+    CREATE TABLE IF NOT EXISTS catalog_indexes (
        database_name TEXT,
        table_name TEXT,
        seq INTEGER,

@@ -45,7 +45,7 @@ async def init_internal_db(db):
        FOREIGN KEY (database_name) REFERENCES databases(database_name),
        FOREIGN KEY (database_name, table_name) REFERENCES tables(database_name, table_name)
    );
-    CREATE TABLE IF NOT EXISTS foreign_keys (
+    CREATE TABLE IF NOT EXISTS catalog_foreign_keys (
        database_name TEXT,
        table_name TEXT,
        id INTEGER,

@@ -69,12 +69,19 @@ async def populate_schema_tables(internal_db, db):
    database_name = db.name

    def delete_everything(conn):
-        conn.execute("DELETE FROM tables WHERE database_name = ?", [database_name])
-        conn.execute("DELETE FROM columns WHERE database_name = ?", [database_name])
-        conn.execute(
-            "DELETE FROM foreign_keys WHERE database_name = ?", [database_name]
-        )
-        conn.execute("DELETE FROM indexes WHERE database_name = ?", [database_name])
+        conn.execute(
+            "DELETE FROM catalog_tables WHERE database_name = ?", [database_name]
+        )
+        conn.execute(
+            "DELETE FROM catalog_columns WHERE database_name = ?", [database_name]
+        )
+        conn.execute(
+            "DELETE FROM catalog_foreign_keys WHERE database_name = ?",
+            [database_name],
+        )
+        conn.execute(
+            "DELETE FROM catalog_indexes WHERE database_name = ?", [database_name]
+        )

    await internal_db.execute_write_fn(delete_everything)

@@ -133,14 +140,14 @@ async def populate_schema_tables(internal_db, db):
    await internal_db.execute_write_many(
        """
-        INSERT INTO tables (database_name, table_name, rootpage, sql)
+        INSERT INTO catalog_tables (database_name, table_name, rootpage, sql)
        values (?, ?, ?, ?)
    """,
        tables_to_insert,
    )
    await internal_db.execute_write_many(
        """
-        INSERT INTO columns (
+        INSERT INTO catalog_columns (
            database_name, table_name, cid, name, type, "notnull", default_value, is_pk, hidden
        ) VALUES (
            :database_name, :table_name, :cid, :name, :type, :notnull, :default_value, :is_pk, :hidden

@@ -150,7 +157,7 @@ async def populate_schema_tables(internal_db, db):
    )
    await internal_db.execute_write_many(
        """
-        INSERT INTO foreign_keys (
+        INSERT INTO catalog_foreign_keys (
            database_name, table_name, "id", seq, "table", "from", "to", on_update, on_delete, match
        ) VALUES (
            :database_name, :table_name, :id, :seq, :table, :from, :to, :on_update, :on_delete, :match

@@ -160,7 +167,7 @@ async def populate_schema_tables(internal_db, db):
    )
    await internal_db.execute_write_many(
        """
-        INSERT INTO indexes (
+        INSERT INTO catalog_indexes (
            database_name, table_name, seq, name, "unique", origin, partial
        ) VALUES (
            :database_name, :table_name, :seq, :name, :unique, :origin, :partial
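The rename gives every internal schema table a `catalog_` prefix, so any code querying the old names needs updating (the `_table_columns` change later in this diff does exactly that). A hedged sketch of querying the renamed tables, using `get_internal_database()` and `execute()` the same way they are used elsewhere in this diff:

```python
# Sketch: list (table, column) pairs via the renamed catalog tables.
async def columns_for(datasette, database_name):
    internal_db = datasette.get_internal_database()
    result = await internal_db.execute(
        "select table_name, name from catalog_columns where database_name = ?",
        [database_name],
    )
    return [(row["table_name"], row["name"]) for row in result.rows]
```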
@@ -4,6 +4,7 @@ Backported from Python 3.8.
This code is licensed under the Python License:
https://github.com/python/cpython/blob/v3.8.3/LICENSE
"""
+
import os
from shutil import copy, copy2, copystat, Error
@@ -1,2 +1,2 @@
-__version__ = "1.0a3"
+__version__ = "1.0a13"
__version_info__ = tuple(__version__.split("."))
@@ -10,7 +10,6 @@ from markupsafe import escape

import pint

-from datasette import __version__
from datasette.database import QueryInterrupted
from datasette.utils.asgi import Request
from datasette.utils import (

@@ -102,9 +101,6 @@ class BaseView:
        response.body = b""
        return response

-    def database_color(self, database):
-        return "ff0000"
-
    async def method_not_allowed(self, request):
        if (
            request.path.endswith(".json")

@@ -146,11 +142,11 @@ class BaseView:

    async def render(self, templates, request, context=None):
        context = context or {}
-        template = self.ds.jinja_env.select_template(templates)
+        environment = self.ds.get_jinja_environment(request)
+        template = environment.select_template(templates)
        template_context = {
            **context,
            **{
-                "database_color": self.database_color,
                "select_templates": [
                    f"{'*' if template_name == template.name else ''}{template_name}"
                    for template_name in templates

@@ -488,7 +484,6 @@ async def stream_csv(datasette, fetch_data, request, database):

    async def stream_fn(r):
        nonlocal data, trace
-        print("max_csv_mb", datasette.setting("max_csv_mb"))
        limited_writer = LimitedWriter(r, datasette.setting("max_csv_mb"))
        if trace:
            await limited_writer.write(preamble)

@@ -558,16 +553,18 @@ async def stream_csv(datasette, fetch_data, request, database):
                        if cell is None:
                            new_row.extend(("", ""))
                        else:
-                            assert isinstance(cell, dict)
-                            new_row.append(cell["value"])
-                            new_row.append(cell["label"])
+                            if not isinstance(cell, dict):
+                                new_row.extend((cell, ""))
+                            else:
+                                new_row.append(cell["value"])
+                                new_row.append(cell["label"])
                    else:
                        new_row.append(cell)
                await writer.writerow(new_row)
-        except Exception as e:
-            sys.stderr.write("Caught this error: {}\n".format(e))
+        except Exception as ex:
+            sys.stderr.write("Caught this error: {}\n".format(ex))
            sys.stderr.flush()
-            await r.write(str(e))
+            await r.write(str(ex))
            return
        await limited_writer.write(postamble)
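The CSV change above makes the `_labels` expansion tolerate cells that are not `{value, label}` dicts instead of crashing on an assert. A standalone sketch of that expansion logic with made-up rows, to show the shape of the output:

```python
# Standalone sketch of the expand-labels logic above (hypothetical rows).
def expand_label_cells(row, expanded_columns):
    new_row = []
    for column, cell in row:
        if column in expanded_columns:
            if cell is None:
                new_row.extend(("", ""))
            elif not isinstance(cell, dict):
                # Previously an assert; now a plain value gets an empty label
                new_row.extend((cell, ""))
            else:
                new_row.extend((cell["value"], cell["label"]))
        else:
            new_row.append(cell)
    return new_row

row = [("id", 1), ("owner", {"value": 7, "label": "Cleo"}), ("notes", "x")]
print(expand_label_cells(row, {"owner"}))  # [1, 7, 'Cleo', 'x']
```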
@@ -1,5 +1,4 @@
from dataclasses import dataclass, field
-from typing import Callable
from urllib.parse import parse_qsl, urlencode
import asyncio
import hashlib

@@ -10,7 +9,9 @@ import os
import re
import sqlite_utils
import textwrap
+from typing import List

+from datasette.events import AlterTableEvent, CreateTableEvent, InsertRowsEvent
from datasette.database import QueryInterrupted
from datasette.utils import (
    add_cors_headers,

@@ -18,6 +19,7 @@ from datasette.utils import (
    call_with_supported_arguments,
    derive_named_parameters,
    format_bytes,
+   make_slot_function,
    tilde_decode,
    to_css_class,
    validate_sql_select,

@@ -126,9 +128,9 @@ class DatabaseView(View):
        "views": sql_views,
        "queries": canned_queries,
        "allow_execute_sql": allow_execute_sql,
-       "table_columns": await _table_columns(datasette, database)
-       if allow_execute_sql
-       else {},
+       "table_columns": (
+           await _table_columns(datasette, database) if allow_execute_sql else {}
+       ),
    }

    if format_ == "json":

@@ -143,9 +145,11 @@ class DatabaseView(View):
            datasette.urls.path(path_with_format(request=request, format="json")),
        )
        templates = (f"database-{to_css_class(database)}.html", "database.html")
-       template = datasette.jinja_env.select_template(templates)
+       environment = datasette.get_jinja_environment(request)
+       template = environment.select_template(templates)
        context = {
            **json_data,
+           "database_color": db.color,
            "database_actions": database_actions,
            "show_hidden": request.args.get("_show_hidden"),
            "editable": True,

@@ -154,12 +158,14 @@ class DatabaseView(View):
            and not db.is_mutable
            and not db.is_memory,
            "attached_databases": attached_databases,
-           "database_color": lambda _: "#ff0000",
            "alternate_url_json": alternate_url_json,
            "select_templates": [
                f"{'*' if template_name == template.name else ''}{template_name}"
                for template_name in templates
            ],
+           "top_database": make_slot_function(
+               "top_database", datasette, request, database=database
+           ),
        }
        return Response.html(
            await datasette.render_template(

@@ -179,6 +185,7 @@ class DatabaseView(View):
@dataclass
class QueryContext:
    database: str = field(metadata={"help": "The name of the database being queried"})
+   database_color: str = field(metadata={"help": "The color of the database"})
    query: dict = field(
        metadata={"help": "The SQL query object containing the `sql` string"}
    )

@@ -232,9 +239,6 @@ class QueryContext:
    show_hide_hidden: str = field(
        metadata={"help": "Hidden input field for the _show_sql parameter"}
    )
-   database_color: Callable = field(
-       metadata={"help": "Function that returns a color for a given database name"}
-   )
    table_columns: dict = field(
        metadata={"help": "Dictionary of table name to list of column names"}
    )

@@ -247,6 +251,17 @@ class QueryContext:
            "help": "List of templates that were considered for rendering this page"
        }
    )
+   top_query: callable = field(
+       metadata={"help": "Callable to render the top_query slot"}
+   )
+   top_canned_query: callable = field(
+       metadata={"help": "Callable to render the top_canned_query slot"}
+   )
+   query_actions: callable = field(
+       metadata={
+           "help": "Callable returning a list of links for the query action menu"
+       }
+   )


async def get_tables(datasette, request, db):

@@ -596,7 +611,8 @@ class QueryView(View):
            f"query-{to_css_class(database)}-{to_css_class(canned_query['name'])}.html",
        )

-       template = datasette.jinja_env.select_template(templates)
+       environment = datasette.get_jinja_environment(request)
+       template = environment.select_template(templates)
        alternate_url_json = datasette.absolute_url(
            request,
            datasette.urls.path(path_with_format(request=request, format="json")),

@@ -684,11 +700,28 @@ class QueryView(View):
            )
        )

+       async def query_actions():
+           query_actions = []
+           for hook in pm.hook.query_actions(
+               datasette=datasette,
+               actor=request.actor,
+               database=database,
+               query_name=canned_query["name"] if canned_query else None,
+               request=request,
+               sql=sql,
+               params=params,
+           ):
+               extra_links = await await_me_maybe(hook)
+               if extra_links:
+                   query_actions.extend(extra_links)
+           return query_actions
+
        r = Response.html(
            await datasette.render_template(
                template,
                QueryContext(
                    database=database,
+                   database_color=db.color,
                    query={
                        "sql": sql,
                        "params": params,

@@ -709,9 +742,11 @@ class QueryView(View):
                    display_rows=await display_rows(
                        datasette, database, request, rows, columns
                    ),
-                   table_columns=await _table_columns(datasette, database)
-                   if allow_execute_sql
-                   else {},
+                   table_columns=(
+                       await _table_columns(datasette, database)
+                       if allow_execute_sql
+                       else {}
+                   ),
                    columns=columns,
                    renderers=renderers,
                    url_csv=datasette.urls.path(

@@ -721,12 +756,22 @@ class QueryView(View):
                    ),
                    show_hide_hidden=markupsafe.Markup(show_hide_hidden),
                    metadata=canned_query or metadata,
-                   database_color=lambda _: "#ff0000",
                    alternate_url_json=alternate_url_json,
                    select_templates=[
                        f"{'*' if template_name == template.name else ''}{template_name}"
                        for template_name in templates
                    ],
+                   top_query=make_slot_function(
+                       "top_query", datasette, request, database=database, sql=sql
+                   ),
+                   top_canned_query=make_slot_function(
+                       "top_canned_query",
+                       datasette,
+                       request,
+                       database=database,
+                       query_name=canned_query["name"] if canned_query else None,
+                   ),
+                   query_actions=query_actions,
                ),
                request=request,
                view_name="database",

@@ -770,7 +815,17 @@ class MagicParameters(dict):
class TableCreateView(BaseView):
    name = "table-create"

-   _valid_keys = {"table", "rows", "row", "columns", "pk", "pks", "ignore", "replace"}
+   _valid_keys = {
+       "table",
+       "rows",
+       "row",
+       "columns",
+       "pk",
+       "pks",
+       "ignore",
+       "replace",
+       "alter",
+   }
    _supported_column_types = {
        "text",
        "integer",

@@ -828,7 +883,7 @@ class TableCreateView(BaseView):
        if not await self.ds.permission_allowed(
            request.actor, "update-row", resource=database_name
        ):
-           return _error(["Permission denied - need update-row"], 403)
+           return _error(["Permission denied: need update-row"], 403)

        table_name = data.get("table")
        if not table_name:

@@ -852,7 +907,21 @@ class TableCreateView(BaseView):
        if not await self.ds.permission_allowed(
            request.actor, "insert-row", resource=database_name
        ):
-           return _error(["Permission denied - need insert-row"], 403)
+           return _error(["Permission denied: need insert-row"], 403)
+
+       alter = False
+       if rows or row:
+           if not table_exists:
+               # if table is being created for the first time, alter=True
+               alter = True
+           else:
+               # alter=True only if they request it AND they have permission
+               if data.get("alter"):
+                   if not await self.ds.permission_allowed(
+                       request.actor, "alter-table", resource=database_name
+                   ):
+                       return _error(["Permission denied: need alter-table"], 403)
+                   alter = True

        if columns:
            if rows or row:

@@ -917,10 +986,18 @@ class TableCreateView(BaseView):
                return _error(["pk cannot be changed for existing table"])
            pks = actual_pks

+       initial_schema = None
+       if table_exists:
+           initial_schema = await db.execute_fn(
+               lambda conn: sqlite_utils.Database(conn)[table_name].schema
+           )
+
        def create_table(conn):
            table = sqlite_utils.Database(conn)[table_name]
            if rows:
-               table.insert_all(rows, pk=pks or pk, ignore=ignore, replace=replace)
+               table.insert_all(
+                   rows, pk=pks or pk, ignore=ignore, replace=replace, alter=alter
+               )
            else:
                table.create(
                    {c["name"]: c["type"] for c in columns},

@@ -932,6 +1009,18 @@ class TableCreateView(BaseView):
            schema = await db.execute_write_fn(create_table)
        except Exception as e:
            return _error([str(e)])
+
+       if initial_schema is not None and initial_schema != schema:
+           await self.ds.track_event(
+               AlterTableEvent(
+                   request.actor,
+                   database=database_name,
+                   table=table_name,
+                   before_schema=initial_schema,
+                   after_schema=schema,
+               )
+           )
+
        table_url = self.ds.absolute_url(
            request, self.ds.urls.table(db.name, table_name)
        )

@@ -948,13 +1037,32 @@ class TableCreateView(BaseView):
        }
        if rows:
            details["row_count"] = len(rows)
+
+       if not table_exists:
+           # Only log creation if we created a table
+           await self.ds.track_event(
+               CreateTableEvent(
+                   request.actor, database=db.name, table=table_name, schema=schema
+               )
+           )
+       if rows:
+           await self.ds.track_event(
+               InsertRowsEvent(
+                   request.actor,
+                   database=db.name,
+                   table=table_name,
+                   num_rows=len(rows),
+                   ignore=ignore,
+                   replace=replace,
+               )
+           )
        return Response.json(details, status=201)


async def _table_columns(datasette, database_name):
-   internal = datasette.get_database("_internal")
-   result = await internal.execute(
-       "select table_name, name from columns where database_name = ?",
+   internal_db = datasette.get_internal_database()
+   result = await internal_db.execute(
+       "select table_name, name from catalog_columns where database_name = ?",
        [database_name],
    )
    table_columns = {}

@@ -1017,9 +1125,11 @@ async def display_rows(datasette, database, request, rows, columns):
        display_value = markupsafe.Markup(
            '<a class="blob-download" href="{}"{}>&lt;Binary:&nbsp;{:,}&nbsp;byte{}&gt;</a>'.format(
                blob_url,
-               ' title="{}"'.format(formatted)
-               if "bytes" not in formatted
-               else "",
+               (
+                   ' title="{}"'.format(formatted)
+                   if "bytes" not in formatted
+                   else ""
+               ),
                len(value),
                "" if len(value) == 1 else "s",
            )
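The `QueryContext` dataclass documents every template variable through `field(metadata={"help": ...})`. A small illustrative sketch (not code from this diff) of how such metadata can be read back, for example to generate context documentation:

```python
# Illustrative: reading help text back out of dataclass field metadata.
from dataclasses import dataclass, field, fields

@dataclass
class ExampleContext:
    database: str = field(metadata={"help": "The name of the database being queried"})
    database_color: str = field(metadata={"help": "The color of the database"})

for f in fields(ExampleContext):
    print(f"{f.name}: {f.metadata['help']}")
```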
@@ -1,7 +1,12 @@
-import hashlib
import json

-from datasette.utils import add_cors_headers, CustomJSONEncoder
+from datasette.plugins import pm
+from datasette.utils import (
+    add_cors_headers,
+    await_me_maybe,
+    make_slot_function,
+    CustomJSONEncoder,
+)
from datasette.utils.asgi import Response
from datasette.version import __version__

@@ -105,9 +110,7 @@ class IndexView(BaseView):
                {
                    "name": name,
                    "hash": db.hash,
-                   "color": db.hash[:6]
-                   if db.hash
-                   else hashlib.md5(name.encode("utf8")).hexdigest()[:6],
+                   "color": db.color,
                    "path": self.ds.urls.database(name),
                    "tables_and_views_truncated": tables_and_views_truncated,
                    "tables_and_views_more": (len(visible_tables) + len(views))

@@ -134,6 +137,15 @@ class IndexView(BaseView):
                headers=headers,
            )
        else:
+           homepage_actions = []
+           for hook in pm.hook.homepage_actions(
+               datasette=self.ds,
+               actor=request.actor,
+               request=request,
+           ):
+               extra_links = await await_me_maybe(hook)
+               if extra_links:
+                   homepage_actions.extend(extra_links)
            return await self.render(
                ["index.html"],
                request=request,

@@ -144,5 +156,9 @@ class IndexView(BaseView):
                    "private": not await self.ds.permission_allowed(
                        None, "view-instance"
                    ),
+                   "top_homepage": make_slot_function(
+                       "top_homepage", self.ds, request
+                   ),
+                   "homepage_actions": homepage_actions,
                },
            )
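The homepage, query, and row action menus all use the same gather pattern: call every registered hook, then pass each result through `await_me_maybe` so plugins can return either a plain list or an awaitable. A condensed, runnable sketch of that pattern; `await_me_maybe` is an existing `datasette.utils` helper, the hooks are made up:

```python
import asyncio
from datasette.utils import await_me_maybe

# A plugin hook may be implemented synchronously or asynchronously:
def sync_hook():
    return [{"href": "/a", "label": "A"}]

async def async_hook():
    return [{"href": "/b", "label": "B"}]

async def gather_actions(hook_results):
    actions = []
    for result in hook_results:
        extra_links = await await_me_maybe(result)  # awaits only if needed
        if extra_links:
            actions.extend(extra_links)
    return actions

print(asyncio.run(gather_actions([sync_hook(), async_hook()])))
# [{'href': '/a', 'label': 'A'}, {'href': '/b', 'label': 'B'}]
```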
@@ -1,13 +1,14 @@
from datasette.utils.asgi import NotFound, Forbidden, Response
from datasette.database import QueryInterrupted
+from datasette.events import UpdateRowEvent, DeleteRowEvent
from .base import DataView, BaseView, _error
from datasette.utils import (
    tilde_decode,
    urlsafe_components,
+   await_me_maybe,
+   make_slot_function,
    to_css_class,
    escape_sqlite,
    row_sql_params_pks,
)
+from datasette.plugins import pm
import json
import sqlite_utils
from .table import display_columns_and_rows

@@ -18,7 +19,8 @@ class RowView(DataView):

    async def data(self, request, default_labels=False):
        resolved = await self.ds.resolve_row(request)
-       database = resolved.db.name
+       db = resolved.db
+       database = db.name
        table = resolved.table
        pk_values = resolved.pk_values

@@ -55,11 +57,26 @@ class RowView(DataView):
            )
            for column in display_columns:
                column["sortable"] = False
+
+           row_actions = []
+           for hook in pm.hook.row_actions(
+               datasette=self.ds,
+               actor=request.actor,
+               request=request,
+               database=database,
+               table=table,
+               row=rows[0],
+           ):
+               extra_links = await await_me_maybe(hook)
+               if extra_links:
+                   row_actions.extend(extra_links)
+
            return {
                "private": private,
                "foreign_key_tables": await self.foreign_key_tables(
                    database, table, pk_values
                ),
+               "database_color": db.color,
                "display_columns": display_columns,
                "display_rows": display_rows,
                "custom_table_templates": [

@@ -67,10 +84,19 @@ class RowView(DataView):
                    f"_table-row-{to_css_class(database)}-{to_css_class(table)}.html",
                    "_table.html",
                ],
+               "row_actions": row_actions,
                "metadata": (self.ds.metadata("databases") or {})
                .get(database, {})
                .get("tables", {})
                .get(table, {}),
+               "top_row": make_slot_function(
+                   "top_row",
+                   self.ds,
+                   request,
+                   database=resolved.db.name,
+                   table=resolved.table,
+                   row=rows[0],
+               ),
            }

@@ -80,7 +106,7 @@ class RowView(DataView):
            "columns": columns,
            "primary_keys": resolved.pks,
            "primary_key_values": pk_values,
-           "units": self.ds.table_metadata(database, table).get("units", {}),
+           "units": (await self.ds.table_config(database, table)).get("units", {}),
        }

        if "foreign_key_tables" in (request.args.get("_extras") or "").split(","):

@@ -192,6 +218,15 @@ class RowDeleteView(BaseView):
        except Exception as e:
            return _error([str(e)], 500)

+       await self.ds.track_event(
+           DeleteRowEvent(
+               actor=request.actor,
+               database=resolved.db.name,
+               table=resolved.table,
+               pks=resolved.pk_values,
+           )
+       )
+
        return Response.json({"ok": True}, status=200)

@@ -219,11 +254,21 @@ class RowUpdateView(BaseView):
        if not "update" in data or not isinstance(data["update"], dict):
            return _error(["JSON must contain an update dictionary"])

+       invalid_keys = set(data.keys()) - {"update", "return", "alter"}
+       if invalid_keys:
+           return _error(["Invalid keys: {}".format(", ".join(invalid_keys))])
+
        update = data["update"]

+       alter = data.get("alter")
+       if alter and not await self.ds.permission_allowed(
+           request.actor, "alter-table", resource=(resolved.db.name, resolved.table)
+       ):
+           return _error(["Permission denied for alter-table"], 403)
+
        def update_row(conn):
            sqlite_utils.Database(conn)[resolved.table].update(
-               resolved.pk_values, update
+               resolved.pk_values, update, alter=alter
            )

        try:

@@ -238,4 +283,14 @@ class RowUpdateView(BaseView):
            )
        rows = list(results.rows)
        result["row"] = dict(rows[0])

+       await self.ds.track_event(
+           UpdateRowEvent(
+               actor=request.actor,
+               database=resolved.db.name,
+               table=resolved.table,
+               pks=resolved.pk_values,
+           )
+       )
+
        return Response.json(result, status=200)
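With `alter` now accepted by the update endpoint, a JSON write API call can add columns on the fly when the caller holds the `alter-table` permission. A hedged client sketch; the URL follows the `/<database>/<table>/<pks>/-/update` shape used by this view, while the database, table, and column names are made up and `dstok_...` stands in for a real API token:

```python
# Hypothetical client call against the update-row JSON API.
import json
import urllib.request

body = {
    "update": {"status": "done", "new_column": "added on the fly"},
    "alter": True,   # requires the alter-table permission
    "return": True,  # ask for the updated row back
}
req = urllib.request.Request(
    "http://localhost:8001/data/tasks/1/-/update",  # db "data", table "tasks", pk 1
    data=json.dumps(body).encode("utf8"),
    headers={
        "Authorization": "Bearer dstok_...",  # a real API token goes here
        "Content-Type": "application/json",
    },
    method="POST",
)
print(json.load(urllib.request.urlopen(req)))
```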
@@ -1,4 +1,5 @@
import json
+from datasette.events import LogoutEvent, LoginEvent, CreateTokenEvent
from datasette.utils.asgi import Response, Forbidden
from datasette.utils import (
    actor_matches_allow,

@@ -41,7 +42,7 @@ class JsonDataView(BaseView):
        if self.ds.cors:
            add_cors_headers(headers)
        return Response(
-           json.dumps(data),
+           json.dumps(data, default=repr),
            content_type="application/json; charset=utf-8",
            headers=headers,
        )

@@ -52,7 +53,7 @@ class JsonDataView(BaseView):
            request=request,
            context={
                "filename": self.filename,
-               "data_json": json.dumps(data, indent=4),
+               "data_json": json.dumps(data, indent=4, default=repr),
            },
        )
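Passing `default=repr` makes these debug endpoints resilient to values that are not JSON-serializable, at the cost of emitting their `repr()` instead of raising. A two-line illustration with a made-up value:

```python
import json

data = {"name": "fixtures", "loaded_at": complex(1, 2)}  # complex is not JSON-serializable
# json.dumps(data) would raise TypeError; default=repr degrades gracefully:
print(json.dumps(data, default=repr))  # {"name": "fixtures", "loaded_at": "(1+2j)"}
```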
@@ -80,9 +81,9 @@ class AuthTokenView(BaseView):
        if secrets.compare_digest(token, self.ds._root_token):
            self.ds._root_token = None
            response = Response.redirect(self.ds.urls.instance())
-           response.set_cookie(
-               "ds_actor", self.ds.sign({"a": {"id": "root"}}, "actor")
-           )
+           root_actor = {"id": "root"}
+           response.set_cookie("ds_actor", self.ds.sign({"a": root_actor}, "actor"))
+           await self.ds.track_event(LoginEvent(actor=root_actor))
            return response
        else:
            raise Forbidden("Invalid token")

@@ -105,6 +106,7 @@ class LogoutView(BaseView):
        response = Response.redirect(self.ds.urls.instance())
        response.set_cookie("ds_actor", "", expires=0, max_age=0)
        self.ds.add_message(request, "You are now logged out", self.ds.WARNING)
+       await self.ds.track_event(LogoutEvent(actor=request.actor))
        return response

@@ -122,7 +124,17 @@ class PermissionsDebugView(BaseView):
            # list() avoids error if check is performed during template render:
            {
                "permission_checks": list(reversed(self.ds._permission_checks)),
-               "permissions": list(self.ds.permissions.values()),
+               "permissions": [
+                   {
+                       "name": p.name,
+                       "abbr": p.abbr,
+                       "description": p.description,
+                       "takes_database": p.takes_database,
+                       "takes_resource": p.takes_resource,
+                       "default": p.default,
+                   }
+                   for p in self.ds.permissions.values()
+               ],
            },
        )

@@ -152,6 +164,7 @@ class PermissionsDebugView(BaseView):
                "permission": permission,
                "resource": resource,
                "result": result,
+               "default": self.ds.permissions[permission].default,
            }
        )

@@ -238,7 +251,7 @@ class CreateTokenView(BaseView):
        # Build list of databases and tables the user has permission to view
        database_with_tables = []
        for database in self.ds.databases.values():
-           if database.name in ("_internal", "_memory"):
+           if database.name == "_memory":
                continue
            if not await self.ds.permission_allowed(
                request.actor, "view-database", database.name
            ):

@@ -339,6 +352,15 @@ class CreateTokenView(BaseView):
            restrict_resource=restrict_resource,
        )
        token_bits = self.ds.unsign(token[len("dstok_") :], namespace="token")
+       await self.ds.track_event(
+           CreateTokenEvent(
+               actor=request.actor,
+               expires_after=expires_after,
+               restrict_all=restrict_all,
+               restrict_database=restrict_database,
+               restrict_resource=restrict_resource,
+           )
+       )
        context = await self.shared(request)
        context.update({"errors": errors, "token": token, "token_bits": token_bits})
        return await self.render(["create_token.html"], request, context)

@@ -354,9 +376,7 @@ class ApiExplorerView(BaseView):
            if name == "_internal":
                continue
            database_visible, _ = await self.ds.check_visibility(
-               request.actor,
-               "view-database",
-               name,
+               request.actor, permissions=[("view-database", name), "view-instance"]
            )
            if not database_visible:
                continue

@@ -365,8 +385,11 @@ class ApiExplorerView(BaseView):
            for table in table_names:
                visible, _ = await self.ds.check_visibility(
                    request.actor,
-                   "view-table",
-                   (name, table),
+                   permissions=[
+                       ("view-table", (name, table)),
+                       ("view-database", name),
+                       "view-instance",
+                   ],
                )
                if not visible:
                    continue

@@ -463,6 +486,13 @@ class ApiExplorerView(BaseView):
        return databases

    async def get(self, request):
+       visible, private = await self.ds.check_visibility(
+           request.actor,
+           permissions=["view-instance"],
+       )
+       if not visible:
+           raise Forbidden("You do not have permission to view this instance")
+
        def api_path(link):
            return "/-/api#{}".format(
                urllib.parse.urlencode(

@@ -480,5 +510,6 @@ class ApiExplorerView(BaseView):
            {
                "example_links": await self.example_links(request),
                "api_path": api_path,
+               "private": private,
            },
        )
@ -8,6 +8,12 @@ import markupsafe
|
|||
|
||||
from datasette.plugins import pm
|
||||
from datasette.database import QueryInterrupted
|
||||
from datasette.events import (
|
||||
AlterTableEvent,
|
||||
DropTableEvent,
|
||||
InsertRowsEvent,
|
||||
UpsertRowsEvent,
|
||||
)
|
||||
from datasette import tracer
|
||||
from datasette.utils import (
|
||||
add_cors_headers,
|
||||
|
@ -17,6 +23,7 @@ from datasette.utils import (
|
|||
append_querystring,
|
||||
compound_keys_after_sql,
|
||||
format_bytes,
|
||||
make_slot_function,
|
||||
tilde_encode,
|
||||
escape_sqlite,
|
||||
filters_should_redirect,
|
||||
|
@ -74,11 +81,10 @@ class Row:
|
|||
return json.dumps(d, default=repr, indent=2)
|
||||
|
||||
|
||||
async def _gather_parallel(*args):
|
||||
return await asyncio.gather(*args)
|
||||
|
||||
|
||||
async def _gather_sequential(*args):
|
||||
async def run_sequential(*args):
|
||||
# This used to be swappable for asyncio.gather() to run things in
|
||||
# parallel, but this lead to hard-to-debug locking issues with
|
||||
# in-memory databases: https://github.com/simonw/datasette/issues/2189
|
||||
results = []
|
||||
for fn in args:
|
||||
results.append(await fn)
|
||||
|
@@ -141,11 +147,11 @@ async def display_columns_and_rows(
"""Returns columns, rows for specified table - including fancy foreign key treatment"""
sortable_columns = sortable_columns or set()
db = datasette.databases[database_name]
table_metadata = datasette.table_metadata(database_name, table_name)
column_descriptions = table_metadata.get("columns") or {}
column_descriptions = datasette.metadata("columns", database_name, table_name) or {}
column_details = {
col.name: col for col in await db.table_column_details(table_name)
}
table_config = await datasette.table_config(database_name, table_name)
pks = await db.primary_keys(table_name)
pks_for_display = pks
if not pks_for_display:

@@ -210,7 +216,6 @@ async def display_columns_and_rows(
"raw": pk_path,
"value": markupsafe.Markup(
'<a href="{table_path}/{flat_pks_quoted}">{flat_pks}</a>'.format(
base_url=base_url,
table_path=datasette.urls.table(database_name, table_name),
flat_pks=str(markupsafe.escape(pk_path)),
flat_pks_quoted=path_from_row_pks(row, pks, not pks),

@@ -254,9 +259,11 @@ async def display_columns_and_rows(
path_from_row_pks(row, pks, not pks),
column,
),
' title="{}"'.format(formatted)
if "bytes" not in formatted
else "",
(
' title="{}"'.format(formatted)
if "bytes" not in formatted
else ""
),
len(value),
"" if len(value) == 1 else "s",
)

@@ -289,9 +296,9 @@ async def display_columns_and_rows(
),
)
)
elif column in table_metadata.get("units", {}) and value != "":
elif column in table_config.get("units", {}) and value != "":
# Interpret units using pint
value = value * ureg(table_metadata["units"][column])
value = value * ureg(table_config["units"][column])
# Pint uses floating point which sometimes introduces errors in the compact
# representation, which we have to round off to avoid ugliness. In the vast
# majority of cases this rounding will be inconsequential. I hope.

@@ -307,9 +314,9 @@ async def display_columns_and_rows(
"column": column,
"value": display_value,
"raw": value,
"value_type": "none"
if value is None
else str(type(value).__name__),
"value_type": (
"none" if value is None else str(type(value).__name__)
),
}
)
cell_rows.append(Row(cells))
@@ -404,7 +411,7 @@ class TableInsertView(BaseView):
extras = {
key: value for key, value in data.items() if key not in ("row", "rows")
}
valid_extras = {"return", "ignore", "replace"}
valid_extras = {"return", "ignore", "replace", "alter"}
invalid_extras = extras.keys() - valid_extras
if invalid_extras:
return _errors(

@@ -413,7 +420,6 @@ class TableInsertView(BaseView):
if extras.get("ignore") and extras.get("replace"):
return _errors(['Cannot use "ignore" and "replace" at the same time'])

# Validate columns of each row
columns = set(await db.table_columns(table_name))
columns.update(pks_list)

@@ -428,7 +434,7 @@ class TableInsertView(BaseView):
)
)
invalid_columns = set(row.keys()) - columns
if invalid_columns:
if invalid_columns and not extras.get("alter"):
errors.append(
"Row {} has invalid columns: {}".format(
i, ", ".join(sorted(invalid_columns))

@@ -456,10 +462,10 @@ class TableInsertView(BaseView):
# Must have insert-row AND upsert-row permissions
if not (
await self.ds.permission_allowed(
request.actor, "insert-row", database_name, table_name
request.actor, "insert-row", resource=(database_name, table_name)
)
and await self.ds.permission_allowed(
request.actor, "update-row", database_name, table_name
request.actor, "update-row", resource=(database_name, table_name)
)
):
return _error(

@@ -483,6 +489,8 @@ class TableInsertView(BaseView):
if errors:
return _error(errors, 400)

num_rows = len(rows)

# Now that we've passed pks to _validate_data it's safe to
# fix the rowids case:
if not pks:
@@ -490,10 +498,28 @@ class TableInsertView(BaseView):

ignore = extras.get("ignore")
replace = extras.get("replace")
alter = extras.get("alter")

if upsert and (ignore or replace):
return _error(["Upsert does not support ignore or replace"], 400)

if replace and not await self.ds.permission_allowed(
request.actor, "update-row", resource=(database_name, table_name)
):
return _error(['Permission denied: need update-row to use "replace"'], 403)

initial_schema = None
if alter:
# Must have alter-table permission
if not await self.ds.permission_allowed(
request.actor, "alter-table", resource=(database_name, table_name)
):
return _error(["Permission denied for alter-table"], 403)
# Track initial schema to check if it changed later
initial_schema = await db.execute_fn(
lambda conn: sqlite_utils.Database(conn)[table_name].schema
)

should_return = bool(extras.get("return", False))
row_pk_values_for_later = []
if should_return and upsert:
@@ -503,9 +529,13 @@ class TableInsertView(BaseView):
table = sqlite_utils.Database(conn)[table_name]
kwargs = {}
if upsert:
kwargs["pk"] = pks[0] if len(pks) == 1 else pks
kwargs = {
"pk": pks[0] if len(pks) == 1 else pks,
"alter": alter,
}
else:
kwargs = {"ignore": ignore, "replace": replace}
# Insert
kwargs = {"ignore": ignore, "replace": replace, "alter": alter}
if should_return and not upsert:
rowids = []
method = table.upsert if upsert else table.insert
@@ -543,6 +573,44 @@ class TableInsertView(BaseView):
result["rows"] = [dict(r) for r in fetched_rows.rows]
else:
result["rows"] = rows
# We track the number of rows requested, but do not attempt to show which were actually
# inserted or upserted vs. ignored
if upsert:
await self.ds.track_event(
UpsertRowsEvent(
actor=request.actor,
database=database_name,
table=table_name,
num_rows=num_rows,
)
)
else:
await self.ds.track_event(
InsertRowsEvent(
actor=request.actor,
database=database_name,
table=table_name,
num_rows=num_rows,
ignore=bool(ignore),
replace=bool(replace),
)
)

if initial_schema is not None:
after_schema = await db.execute_fn(
lambda conn: sqlite_utils.Database(conn)[table_name].schema
)
if initial_schema != after_schema:
await self.ds.track_event(
AlterTableEvent(
request.actor,
database=database_name,
table=table_name,
before_schema=initial_schema,
after_schema=after_schema,
)
)

return Response.json(result, status=200 if upsert else 201)

@@ -581,7 +649,7 @@ class TableDropView(BaseView):
try:
data = json.loads(await request.post_body())
confirm = data.get("confirm")
except json.JSONDecodeError as e:
except json.JSONDecodeError:
pass

if not confirm:

@@ -603,6 +671,11 @@ class TableDropView(BaseView):
sqlite_utils.Database(conn)[table_name].drop()

await db.execute_write_fn(drop_table)
await self.ds.track_event(
DropTableEvent(
actor=request.actor, database=database_name, table=table_name
)
)
return Response.json({"ok": True}, status=200)


@@ -648,7 +721,7 @@ async def _columns_to_select(table_columns, pks, request):

async def _sortable_columns_for_table(datasette, database_name, table_name, use_rowid):
db = datasette.databases[database_name]
table_metadata = datasette.table_metadata(database_name, table_name)
table_metadata = await datasette.table_config(database_name, table_name)
if "sortable_columns" in table_metadata:
sortable_columns = set(table_metadata["sortable_columns"])
else:

@@ -825,7 +898,8 @@ async def table_view_traced(datasette, request):
f"table-{to_css_class(resolved.db.name)}-{to_css_class(resolved.table)}.html",
"table.html",
]
template = datasette.jinja_env.select_template(templates)
environment = datasette.get_jinja_environment(request)
template = environment.select_template(templates)
alternate_url_json = datasette.absolute_url(
request,
datasette.urls.path(path_with_format(request=request, format="json")),

@@ -860,6 +934,13 @@ async def table_view_traced(datasette, request):
f"{'*' if template_name == template.name else ''}{template_name}"
for template_name in templates
],
top_table=make_slot_function(
"top_table",
datasette,
request,
database=resolved.db.name,
table=resolved.table,
),
),
request=request,
view_name="table",

@@ -939,7 +1020,7 @@ async def table_view_data(
nocount = True
nofacet = True

table_metadata = datasette.table_metadata(database_name, table_name)
table_metadata = await datasette.table_config(database_name, table_name)
units = table_metadata.get("units", {})

# Arguments that start with _ and don't contain a __ are

@@ -984,9 +1065,9 @@ async def table_view_data(

from_sql = "from {table_name} {where}".format(
table_name=escape_sqlite(table_name),
where=("where {} ".format(" and ".join(where_clauses)))
if where_clauses
else "",
where=(
("where {} ".format(" and ".join(where_clauses))) if where_clauses else ""
),
)
# Copy of params so we can mutate them later:
from_sql_params = dict(**params)

@@ -1050,10 +1131,12 @@ async def table_view_data(
column=escape_sqlite(sort or sort_desc),
op=">" if sort else "<",
p=len(params),
extra_desc_only=""
if sort
else " or {column2} is null".format(
column2=escape_sqlite(sort or sort_desc)
extra_desc_only=(
""
if sort
else " or {column2} is null".format(
column2=escape_sqlite(sort or sort_desc)
)
),
next_clauses=" and ".join(next_by_pk_clauses),
)

@@ -1162,7 +1245,7 @@ async def table_view_data(
# Expand them
expanded_labels.update(
await datasette.expand_foreign_keys(
database_name, table_name, column, values
request.actor, database_name, table_name, column, values
)
)
if expanded_labels:

@@ -1201,9 +1284,6 @@ async def table_view_data(
)
rows = rows[:page_size]

# For performance profiling purposes, ?_noparallel=1 turns off asyncio.gather
gather = _gather_sequential if request.args.get("_noparallel") else _gather_parallel

# Resolve extras
extras = _get_extras(request)
if any(k for k in request.args.keys() if k == "_facet" or k.startswith("_facet_")):

@@ -1253,7 +1333,7 @@ async def table_view_data(
sql=sql_no_order_no_limit,
params=params,
table=table_name,
metadata=table_metadata,
table_config=table_metadata,
row_count=extra_count,
)
)

@@ -1267,7 +1347,7 @@ async def table_view_data(
if not nofacet:
# Run them in parallel
facet_awaitables = [facet.facet_results() for facet in facet_instances]
facet_awaitable_results = await gather(*facet_awaitables)
facet_awaitable_results = await run_sequential(*facet_awaitables)
for (
instance_facet_results,
instance_facets_timed_out,

@@ -1300,7 +1380,7 @@ async def table_view_data(
):
# Run them in parallel
facet_suggest_awaitables = [facet.suggest() for facet in facet_instances]
for suggest_result in await gather(*facet_suggest_awaitables):
for suggest_result in await run_sequential(*facet_suggest_awaitables):
suggested_facets.extend(suggest_result)
return suggested_facets

@@ -1339,22 +1419,28 @@ async def table_view_data(
"Primary keys for this table"
return pks

async def extra_table_actions():
async def table_actions():
async def extra_actions():
async def actions():
links = []
for hook in pm.hook.table_actions(
datasette=datasette,
table=table_name,
database=database_name,
actor=request.actor,
request=request,
):
kwargs = {
"datasette": datasette,
"database": database_name,
"actor": request.actor,
"request": request,
}
if is_view:
kwargs["view"] = table_name
method = pm.hook.view_actions
else:
kwargs["table"] = table_name
method = pm.hook.table_actions
for hook in method(**kwargs):
extra_links = await await_me_maybe(hook)
if extra_links:
links.extend(extra_links)
return links

return table_actions
return actions

async def extra_is_view():
return is_view

@@ -1426,7 +1512,7 @@ async def table_view_data(
return table_name

async def extra_database_color():
return lambda _: "ff0000"
return db.color

async def extra_form_hidden_args():
form_hidden_args = []

@@ -1544,7 +1630,7 @@ async def table_view_data(
"database",
"table",
"database_color",
"table_actions",
"actions",
"filters",
"renderers",
"custom_table_templates",

@@ -1585,7 +1671,7 @@ async def table_view_data(
extra_database,
extra_table,
extra_database_color,
extra_table_actions,
extra_actions,
extra_filters,
extra_renderers,
extra_custom_table_templates,

@@ -0,0 +1,21 @@
from datasette import hookimpl

# Test command:
# datasette fixtures.db --plugins-dir=demos/plugins/ \
#   --static static:demos/plugins/static

# Create a set with view names that qualify for this JS, since plugins won't do anything on other pages
# Same pattern as in Nteract data explorer
# https://github.com/hydrosquall/datasette-nteract-data-explorer/blob/main/datasette_nteract_data_explorer/__init__.py#L77
PERMITTED_VIEWS = {"table", "query", "database"}


@hookimpl
def extra_js_urls(view_name):
    print(view_name)
    if view_name in PERMITTED_VIEWS:
        return [
            {
                "url": "/static/table-example-plugins.js",
            }
        ]

@@ -0,0 +1,100 @@
/**
 * Example usage of Datasette JS Manager API
 */

document.addEventListener("datasette_init", function (evt) {
  const { detail: manager } = evt;
  // === Demo plugins: remove before merge===
  addPlugins(manager);
});

/**
 * Examples to test the Datasette JS API
 */
const addPlugins = (manager) => {

  manager.registerPlugin("column-name-plugin", {
    version: 0.1,
    makeColumnActions: (columnMeta) => {
      const { column } = columnMeta;

      return [
        {
          label: "Copy name to clipboard",
          onClick: (evt) => copyToClipboard(column),
        },
        {
          label: "Log column metadata to console",
          onClick: (evt) => console.log(column),
        },
      ];
    },
  });

  manager.registerPlugin("panel-plugin-graphs", {
    version: 0.1,
    makeAboveTablePanelConfigs: () => {
      return [
        {
          id: 'first-panel',
          label: "First",
          render: node => {
            const description = document.createElement('p');
            description.innerText = 'Hello world';
            node.appendChild(description);
          }
        },
        {
          id: 'second-panel',
          label: "Second",
          render: node => {
            const iframe = document.createElement('iframe');
            iframe.src = "https://observablehq.com/embed/@d3/sortable-bar-chart?cell=viewof+order&cell=chart";
            iframe.width = 800;
            iframe.height = 635;
            iframe.frameborder = '0';
            node.appendChild(iframe);
          }
        },
      ];
    },
  });

  manager.registerPlugin("panel-plugin-maps", {
    version: 0.1,
    makeAboveTablePanelConfigs: () => {
      return [
        {
          // ID only has to be unique within a plugin, manager namespaces for you
          id: 'first-map-panel',
          label: "Map plugin",
          // datasette-vega, leaflet can provide a "render" function
          render: node => node.innerHTML = "Here sits a map",
        },
        {
          id: 'second-panel',
          label: "Image plugin",
          render: node => {
            const img = document.createElement('img');
            img.src = 'https://datasette.io/static/datasette-logo.svg';
            node.appendChild(img);
          },
        }
      ];
    },
  });

  // Future: dispatch message to some other part of the page with CustomEvent API
  // Could use to drive filter/sort query builder actions without page refresh.
}



async function copyToClipboard(str) {
  try {
    await navigator.clipboard.writeText(str);
  } catch (err) {
    /** Rejected - text failed to copy to the clipboard. Browsers didn't give permission */
    console.error('Failed to copy: ', err);
  }
}

@@ -4,7 +4,7 @@
Authentication and permissions
================================

Datasette does not require authentication by default. Any visitor to a Datasette instance can explore the full data and execute read-only SQL queries.
Datasette doesn't require authentication by default. Any visitor to a Datasette instance can explore the full data and execute read-only SQL queries.

Datasette's plugin system can be used to add many different styles of authentication, such as user accounts, single sign-on or API keys.

@@ -32,7 +32,10 @@ The one exception is the "root" account, which you can sign into while using Dat

To sign in as root, start Datasette using the ``--root`` command-line option, like this::

$ datasette --root
datasette --root

::

http://127.0.0.1:8001/-/auth-token?token=786fc524e0199d70dc9a581d851f466244e114ca92f33aa3b42a139e9388daa7
INFO: Started server process [25801]
INFO: Waiting for application startup.

@@ -64,26 +67,65 @@ An **action** is a string describing the action the actor would like to perform.

A **resource** is the item the actor wishes to interact with - for example a specific database or table. Some actions, such as ``permissions-debug``, are not associated with a particular resource.

Datasette's built-in view permissions (``view-database``, ``view-table`` etc) default to *allow* - unless you :ref:`configure additional permission rules <authentication_permissions_metadata>` unauthenticated users will be allowed to access content.
Datasette's built-in view permissions (``view-database``, ``view-table`` etc) default to *allow* - unless you :ref:`configure additional permission rules <authentication_permissions_config>` unauthenticated users will be allowed to access content.

Permissions with potentially harmful effects should default to *deny*. Plugin authors should account for this when designing new plugins - for example, the `datasette-upload-csvs <https://github.com/simonw/datasette-upload-csvs>`__ plugin defaults to deny so that installations don't accidentally allow unauthenticated users to create new tables by uploading a CSV file.

.. _authentication_permissions_explained:

How permissions are resolved
----------------------------

The :ref:`datasette.permission_allowed(actor, action, resource=None, default=...)<datasette_permission_allowed>` method is called to check if an actor is allowed to perform a specific action.

This method asks every plugin that implements the :ref:`plugin_hook_permission_allowed` hook if the actor is allowed to perform the action.

Each plugin can return ``True`` to indicate that the actor is allowed to perform the action, ``False`` if they are not allowed and ``None`` if the plugin has no opinion on the matter.

``False`` acts as a veto - if any plugin returns ``False`` then the permission check is denied. Otherwise, if any plugin returns ``True`` then the permission check is allowed.

The ``resource`` argument can be used to specify a specific resource that the action is being performed against. Some permissions, such as ``view-instance``, do not involve a resource. Others such as ``view-database`` have a resource that is a string naming the database. Permissions that take both a database name and the name of a table, view or canned query within that database use a resource that is a tuple of two strings, ``(database_name, resource_name)``.

Plugins that implement the ``permission_allowed()`` hook can decide if they are going to consider the provided resource or not.
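
To illustrate the resolution order, here is a minimal sketch of the veto logic described above (a simplified illustration, not Datasette's actual implementation):

.. code-block:: python

    async def resolve_check(plugin_opinions):
        # plugin_opinions: the True / False / None values returned by
        # each plugin's permission_allowed() hook
        if any(opinion is False for opinion in plugin_opinions):
            return False  # any veto denies the check
        if any(opinion is True for opinion in plugin_opinions):
            return True
        return None  # no opinion - the action's default applies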

.. _authentication_permissions_allow:

Defining permissions with "allow" blocks
----------------------------------------

The standard way to define permissions in Datasette is to use an ``"allow"`` block. This is a JSON document describing which actors are allowed to perform a permission.
The standard way to define permissions in Datasette is to use an ``"allow"`` block :ref:`in the datasette.yaml file <authentication_permissions_config>`. This is a JSON document describing which actors are allowed to perform a permission.

The most basic form of allow block is this (`allow demo <https://latest.datasette.io/-/allow-debug?actor=%7B%22id%22%3A+%22root%22%7D&allow=%7B%0D%0A++++++++%22id%22%3A+%22root%22%0D%0A++++%7D>`__, `deny demo <https://latest.datasette.io/-/allow-debug?actor=%7B%22id%22%3A+%22trevor%22%7D&allow=%7B%0D%0A++++++++%22id%22%3A+%22root%22%0D%0A++++%7D>`__):

.. code-block:: json
.. [[[cog
from metadata_doc import config_example
import textwrap
config_example(cog, textwrap.dedent(
"""
allow:
id: root
""").strip(),
"YAML", "JSON"
)
.. ]]]

{
"allow": {
.. tab:: YAML

.. code-block:: yaml

allow:
id: root

.. tab:: JSON

.. code-block:: json

{
"allow": {
"id": "root"
}
}
}
.. [[[end]]]

This will match any actors with an ``"id"`` property of ``"root"`` - for example, an actor that looks like this:

@@ -96,29 +138,98 @@ This will match any actors with an ``"id"`` property of ``"root"`` - for example

An allow block can specify "deny all" using ``false`` (`demo <https://latest.datasette.io/-/allow-debug?actor=%7B%0D%0A++++%22id%22%3A+%22root%22%0D%0A%7D&allow=false>`__):

.. code-block:: json
.. [[[cog
from metadata_doc import config_example
import textwrap
config_example(cog, textwrap.dedent(
"""
allow: false
""").strip(),
"YAML", "JSON"
)
.. ]]]

{
"allow": false
}
.. tab:: YAML

.. code-block:: yaml

allow: false

.. tab:: JSON

.. code-block:: json

{
"allow": false
}
.. [[[end]]]

An ``"allow"`` of ``true`` allows all access (`demo <https://latest.datasette.io/-/allow-debug?actor=%7B%0D%0A++++%22id%22%3A+%22root%22%0D%0A%7D&allow=true>`__):

.. code-block:: json
.. [[[cog
from metadata_doc import config_example
import textwrap
config_example(cog, textwrap.dedent(
"""
allow: true
""").strip(),
"YAML", "JSON"
)
.. ]]]

{
"allow": true
}
.. tab:: YAML

.. code-block:: yaml

allow: true

.. tab:: JSON

.. code-block:: json

{
"allow": true
}
.. [[[end]]]

Allow keys can provide a list of values. These will match any actor that has any of those values (`allow demo <https://latest.datasette.io/-/allow-debug?actor=%7B%0D%0A++++%22id%22%3A+%22cleopaws%22%0D%0A%7D&allow=%7B%0D%0A++++%22id%22%3A+%5B%0D%0A++++++++%22simon%22%2C%0D%0A++++++++%22cleopaws%22%0D%0A++++%5D%0D%0A%7D>`__, `deny demo <https://latest.datasette.io/-/allow-debug?actor=%7B%0D%0A++++%22id%22%3A+%22pancakes%22%0D%0A%7D&allow=%7B%0D%0A++++%22id%22%3A+%5B%0D%0A++++++++%22simon%22%2C%0D%0A++++++++%22cleopaws%22%0D%0A++++%5D%0D%0A%7D>`__):

.. code-block:: json
.. [[[cog
from metadata_doc import config_example
import textwrap
config_example(cog, textwrap.dedent(
"""
allow:
id:
- simon
- cleopaws
""").strip(),
"YAML", "JSON"
)
.. ]]]

{
"allow": {
"id": ["simon", "cleopaws"]
.. tab:: YAML

.. code-block:: yaml

allow:
id:
- simon
- cleopaws

.. tab:: JSON

.. code-block:: json

{
"allow": {
"id": [
"simon",
"cleopaws"
]
}
}
}
.. [[[end]]]

This will match any actor with an ``"id"`` of either ``"simon"`` or ``"cleopaws"``.
@@ -126,53 +237,154 @@ Actors can have properties that feature a list of values. These will be matched

.. code-block:: json

{
"id": "simon",
"roles": ["staff", "developer"]
}
{
"id": "simon",
"roles": ["staff", "developer"]
}

This allow block will provide access to any actor that has ``"developer"`` as one of their roles (`allow demo <https://latest.datasette.io/-/allow-debug?actor=%7B%0D%0A++++%22id%22%3A+%22simon%22%2C%0D%0A++++%22roles%22%3A+%5B%0D%0A++++++++%22staff%22%2C%0D%0A++++++++%22developer%22%0D%0A++++%5D%0D%0A%7D&allow=%7B%0D%0A++++%22roles%22%3A+%5B%0D%0A++++++++%22developer%22%0D%0A++++%5D%0D%0A%7D>`__, `deny demo <https://latest.datasette.io/-/allow-debug?actor=%7B%0D%0A++++%22id%22%3A+%22cleopaws%22%2C%0D%0A++++%22roles%22%3A+%5B%22dog%22%5D%0D%0A%7D&allow=%7B%0D%0A++++%22roles%22%3A+%5B%0D%0A++++++++%22developer%22%0D%0A++++%5D%0D%0A%7D>`__):

.. code-block:: json
.. [[[cog
from metadata_doc import config_example
import textwrap
config_example(cog, textwrap.dedent(
"""
allow:
roles:
- developer
""").strip(),
"YAML", "JSON"
)
.. ]]]

{
"allow": {
"roles": ["developer"]
.. tab:: YAML

.. code-block:: yaml

allow:
roles:
- developer

.. tab:: JSON

.. code-block:: json

{
"allow": {
"roles": [
"developer"
]
}
}
}
.. [[[end]]]

Note that "roles" is not a concept that is baked into Datasette - it's a convention that plugins can choose to implement and act on.

If you want to provide access to any actor with a value for a specific key, use ``"*"``. For example, to match any logged-in user specify the following (`allow demo <https://latest.datasette.io/-/allow-debug?actor=%7B%0D%0A++++%22id%22%3A+%22simon%22%0D%0A%7D&allow=%7B%0D%0A++++%22id%22%3A+%22*%22%0D%0A%7D>`__, `deny demo <https://latest.datasette.io/-/allow-debug?actor=%7B%0D%0A++++%22bot%22%3A+%22readme-bot%22%0D%0A%7D&allow=%7B%0D%0A++++%22id%22%3A+%22*%22%0D%0A%7D>`__):

.. code-block:: json
.. [[[cog
from metadata_doc import config_example
import textwrap
config_example(cog, textwrap.dedent(
"""
allow:
id: "*"
""").strip(),
"YAML", "JSON"
)
.. ]]]

{
"allow": {
.. tab:: YAML

.. code-block:: yaml

allow:
id: "*"

.. tab:: JSON

.. code-block:: json

{
"allow": {
"id": "*"
}
}
}
.. [[[end]]]

You can specify that only unauthenticated actors (from anonymous HTTP requests) should be allowed access using the special ``"unauthenticated": true`` key in an allow block (`allow demo <https://latest.datasette.io/-/allow-debug?actor=null&allow=%7B%0D%0A++++%22unauthenticated%22%3A+true%0D%0A%7D>`__, `deny demo <https://latest.datasette.io/-/allow-debug?actor=%7B%0D%0A++++%22id%22%3A+%22hello%22%0D%0A%7D&allow=%7B%0D%0A++++%22unauthenticated%22%3A+true%0D%0A%7D>`__):

.. code-block:: json
.. [[[cog
from metadata_doc import config_example
import textwrap
config_example(cog, textwrap.dedent(
"""
allow:
unauthenticated: true
""").strip(),
"YAML", "JSON"
)
.. ]]]

{
"allow": {
.. tab:: YAML

.. code-block:: yaml

allow:
unauthenticated: true

.. tab:: JSON

.. code-block:: json

{
"allow": {
"unauthenticated": true
}
}
}
.. [[[end]]]

Allow keys act as an "or" mechanism. An actor will be able to execute the query if any of their JSON properties match any of the values in the corresponding lists in the ``allow`` block. The following block will allow users with either a ``role`` of ``"ops"`` OR users who have an ``id`` of ``"simon"`` or ``"cleopaws"``:

.. code-block:: json
.. [[[cog
from metadata_doc import config_example
import textwrap
config_example(cog, textwrap.dedent(
"""
allow:
id:
- simon
- cleopaws
role: ops
""").strip(),
"YAML", "JSON"
)
.. ]]]

{
"allow": {
"id": ["simon", "cleopaws"],
.. tab:: YAML

.. code-block:: yaml

allow:
id:
- simon
- cleopaws
role: ops

.. tab:: JSON

.. code-block:: json

{
"allow": {
"id": [
"simon",
"cleopaws"
],
"role": "ops"
}
}
}
.. [[[end]]]

`Demo for cleopaws <https://latest.datasette.io/-/allow-debug?actor=%7B%0D%0A++++%22id%22%3A+%22cleopaws%22%0D%0A%7D&allow=%7B%0D%0A++++%22id%22%3A+%5B%0D%0A++++++++%22simon%22%2C%0D%0A++++++++%22cleopaws%22%0D%0A++++%5D%2C%0D%0A++++%22role%22%3A+%22ops%22%0D%0A%7D>`__, `demo for ops role <https://latest.datasette.io/-/allow-debug?actor=%7B%0D%0A++++%22id%22%3A+%22trevor%22%2C%0D%0A++++%22role%22%3A+%5B%0D%0A++++++++%22ops%22%2C%0D%0A++++++++%22staff%22%0D%0A++++%5D%0D%0A%7D&allow=%7B%0D%0A++++%22id%22%3A+%5B%0D%0A++++++++%22simon%22%2C%0D%0A++++++++%22cleopaws%22%0D%0A++++%5D%2C%0D%0A++++%22role%22%3A+%22ops%22%0D%0A%7D>`__, `demo for an actor matching neither rule <https://latest.datasette.io/-/allow-debug?actor=%7B%0D%0A++++%22id%22%3A+%22percy%22%2C%0D%0A++++%22role%22%3A+%5B%0D%0A++++++++%22staff%22%0D%0A++++%5D%0D%0A%7D&allow=%7B%0D%0A++++%22id%22%3A+%5B%0D%0A++++++++%22simon%22%2C%0D%0A++++++++%22cleopaws%22%0D%0A++++%5D%2C%0D%0A++++%22role%22%3A+%22ops%22%0D%0A%7D>`__.
@@ -183,18 +395,18 @@ The /-/allow-debug tool

The ``/-/allow-debug`` tool lets you try out different ``"action"`` blocks against different ``"actor"`` JSON objects. You can try that out here: https://latest.datasette.io/-/allow-debug

.. _authentication_permissions_metadata:
.. _authentication_permissions_config:

Access permissions in metadata
==============================
Access permissions in ``datasette.yaml``
========================================

There are two ways to configure permissions using ``metadata.json`` (or ``metadata.yaml``).
There are two ways to configure permissions using ``datasette.yaml`` (or ``datasette.json``).

For simple visibility permissions you can use ``"allow"`` blocks in the root, database, table and query sections.

For other permissions you can use a ``"permissions"`` block, described :ref:`in the next section <authentication_permissions_other>`.

You can limit who is allowed to view different parts of your Datasette instance using ``"allow"`` keys in your :ref:`metadata` configuration.
You can limit who is allowed to view different parts of your Datasette instance using ``"allow"`` keys in your :ref:`configuration`.

You can control the following:

@@ -213,25 +425,25 @@ Access to an instance
Here's how to restrict access to your entire Datasette instance to just the ``"id": "root"`` user:

.. [[[cog
from metadata_doc import metadata_example
metadata_example(cog, {
"title": "My private Datasette instance",
"allow": {
"id": "root"
}
})
.. ]]]

.. tab:: YAML

.. code-block:: yaml

from metadata_doc import config_example
config_example(cog, """
title: My private Datasette instance
allow:
id: root
""")
.. ]]]

.. tab:: datasette.yaml

.. code-block:: yaml


.. tab:: JSON
title: My private Datasette instance
allow:
id: root


.. tab:: datasette.json

.. code-block:: json

@@ -246,21 +458,22 @@ Here's how to restrict access to your entire Datasette instance to just the ``"i
To deny access to all users, you can use ``"allow": false``:

.. [[[cog
metadata_example(cog, {
"title": "My entirely inaccessible instance",
"allow": False
})
config_example(cog, """
title: My entirely inaccessible instance
allow: false
""")
.. ]]]

.. tab:: YAML
.. tab:: datasette.yaml

.. code-block:: yaml

title: My entirely inaccessible instance
allow: false

title: My entirely inaccessible instance
allow: false


.. tab:: JSON
.. tab:: datasette.json

.. code-block:: json

@@ -280,28 +493,26 @@ Access to specific databases
To limit access to a specific ``private.db`` database to just authenticated users, use the ``"allow"`` block like this:

.. [[[cog
metadata_example(cog, {
"databases": {
"private": {
"allow": {
"id": "*"
}
}
}
})
.. ]]]

.. tab:: YAML

.. code-block:: yaml

config_example(cog, """
databases:
private:
allow:
id: '*'
id: "*"
""")
.. ]]]

.. tab:: datasette.yaml

.. code-block:: yaml


.. tab:: JSON
databases:
private:
allow:
id: "*"


.. tab:: datasette.json

.. code-block:: json

@@ -324,34 +535,30 @@ Access to specific tables and views
To limit access to the ``users`` table in your ``bakery.db`` database:

.. [[[cog
metadata_example(cog, {
"databases": {
"bakery": {
"tables": {
"users": {
"allow": {
"id": "*"
}
}
}
}
}
})
.. ]]]

.. tab:: YAML

.. code-block:: yaml

config_example(cog, """
databases:
bakery:
tables:
users:
allow:
id: '*'
""")
.. ]]]

.. tab:: datasette.yaml

.. code-block:: yaml


.. tab:: JSON
databases:
bakery:
tables:
users:
allow:
id: '*'


.. tab:: datasette.json

.. code-block:: json

@@ -382,32 +589,12 @@ This works for SQL views as well - you can list their names in the ``"tables"``
Access to specific canned queries
---------------------------------

:ref:`canned_queries` allow you to configure named SQL queries in your ``metadata.json`` that can be executed by users. These queries can be set up to both read and write to the database, so controlling who can execute them can be important.
:ref:`canned_queries` allow you to configure named SQL queries in your ``datasette.yaml`` that can be executed by users. These queries can be set up to both read and write to the database, so controlling who can execute them can be important.

To limit access to the ``add_name`` canned query in your ``dogs.db`` database to just the :ref:`root user<authentication_root>`:

.. [[[cog
metadata_example(cog, {
"databases": {
"dogs": {
"queries": {
"add_name": {
"sql": "INSERT INTO names (name) VALUES (:name)",
"write": True,
"allow": {
"id": ["root"]
}
}
}
}
}
})
.. ]]]

.. tab:: YAML

.. code-block:: yaml

config_example(cog, """
databases:
dogs:
queries:

@@ -417,9 +604,26 @@ To limit access to the ``add_name`` canned query in your ``dogs.db`` database to
allow:
id:
- root
""")
.. ]]]

.. tab:: datasette.yaml

.. code-block:: yaml


.. tab:: JSON
databases:
dogs:
queries:
add_name:
sql: INSERT INTO names (name) VALUES (:name)
write: true
allow:
id:
- root


.. tab:: datasette.json

.. code-block:: json

@@ -458,19 +662,20 @@ You can alternatively use an ``"allow_sql"`` block to control who is allowed to

To prevent any user from executing arbitrary SQL queries, use this:

.. [[[cog
metadata_example(cog, {
"allow_sql": False
})
config_example(cog, """
allow_sql: false
""")
.. ]]]

.. tab:: YAML
.. tab:: datasette.yaml

.. code-block:: yaml

allow_sql: false

allow_sql: false


.. tab:: JSON
.. tab:: datasette.json

.. code-block:: json

@@ -482,22 +687,22 @@ To prevent any user from executing arbitrary SQL queries, use this:
To enable just the :ref:`root user<authentication_root>` to execute SQL for all databases in your instance, use the following:

.. [[[cog
metadata_example(cog, {
"allow_sql": {
"id": "root"
}
})
config_example(cog, """
allow_sql:
id: root
""")
.. ]]]

.. tab:: YAML
.. tab:: datasette.yaml

.. code-block:: yaml

allow_sql:
id: root

allow_sql:
id: root


.. tab:: JSON
.. tab:: datasette.json

.. code-block:: json

@@ -511,28 +716,26 @@ To enable just the :ref:`root user<authentication_root>` to execute SQL for all
To limit this ability for just one specific database, use this:

.. [[[cog
metadata_example(cog, {
"databases": {
"mydatabase": {
"allow_sql": {
"id": "root"
}
}
}
})
.. ]]]

.. tab:: YAML

.. code-block:: yaml

config_example(cog, """
databases:
mydatabase:
allow_sql:
id: root
""")
.. ]]]

.. tab:: datasette.yaml

.. code-block:: yaml


.. tab:: JSON
databases:
mydatabase:
allow_sql:
id: root


.. tab:: datasette.json

.. code-block:: json
@@ -549,33 +752,32 @@ To limit this ability for just one specific database, use this:

.. _authentication_permissions_other:

Other permissions in metadata
=============================
Other permissions in ``datasette.yaml``
=======================================

For all other permissions, you can use one or more ``"permissions"`` blocks in your metadata.
For all other permissions, you can use one or more ``"permissions"`` blocks in your ``datasette.yaml`` configuration file.

To grant access to the :ref:`permissions debug tool <PermissionsDebugView>` to all signed in users you can grant ``permissions-debug`` to any actor with an ``id`` matching the wildcard ``*`` by adding this at the root of your metadata:
To grant access to the :ref:`permissions debug tool <PermissionsDebugView>` to all signed in users, you can grant ``permissions-debug`` to any actor with an ``id`` matching the wildcard ``*`` by adding this at the root of your configuration:

.. [[[cog
metadata_example(cog, {
"permissions": {
"debug-menu": {
"id": "*"
}
}
})
.. ]]]

.. tab:: YAML

.. code-block:: yaml

config_example(cog, """
permissions:
debug-menu:
id: '*'
""")
.. ]]]

.. tab:: datasette.yaml

.. code-block:: yaml


.. tab:: JSON
permissions:
debug-menu:
id: '*'


.. tab:: datasette.json

.. code-block:: json
@@ -591,31 +793,28 @@ To grant access to the :ref:`permissions debug tool <PermissionsDebugView>` to a
To grant ``create-table`` to the user with ``id`` of ``editor`` for the ``docs`` database:

.. [[[cog
metadata_example(cog, {
"databases": {
"docs": {
"permissions": {
"create-table": {
"id": "editor"
}
}
}
}
})
.. ]]]

.. tab:: YAML

.. code-block:: yaml

config_example(cog, """
databases:
docs:
permissions:
create-table:
id: editor
""")
.. ]]]

.. tab:: datasette.yaml

.. code-block:: yaml


.. tab:: JSON
databases:
docs:
permissions:
create-table:
id: editor


.. tab:: datasette.json

.. code-block:: json

@@ -635,27 +834,7 @@ To grant ``create-table`` to the user with ``id`` of ``editor`` for the ``docs``
And for ``insert-row`` against the ``reports`` table in that ``docs`` database:

.. [[[cog
metadata_example(cog, {
"databases": {
"docs": {
"tables": {
"reports": {
"permissions": {
"insert-row": {
"id": "editor"
}
}
}
}
}
}
})
.. ]]]

.. tab:: YAML

.. code-block:: yaml

config_example(cog, """
databases:
docs:
tables:

@@ -663,9 +842,24 @@ And for ``insert-row`` against the ``reports`` table in that ``docs`` database:
permissions:
insert-row:
id: editor
""")
.. ]]]

.. tab:: datasette.yaml

.. code-block:: yaml


.. tab:: JSON
databases:
docs:
tables:
reports:
permissions:
insert-row:
id: editor


.. tab:: datasette.json

.. code-block:: json
@@ -1040,6 +1234,18 @@ Actor is allowed to create a database table.

Default *deny*.

.. _permissions_alter_table:

alter-table
-----------

Actor is allowed to alter a database table.

``resource`` - tuple: (string, string)
The name of the database, then the name of the table

Default *deny*.

.. _permissions_drop_table:

drop-table

@@ -4,6 +4,245 @@
Changelog
=========

.. _v1_0_a13:

1.0a13 (2024-03-12)
-------------------

Each of the key concepts in Datasette now has an :ref:`actions menu <plugin_actions>`, which plugins can use to add additional functionality targeting that entity.

- Plugin hook: :ref:`view_actions() <plugin_hook_view_actions>` for actions that can be applied to a SQL view - see the sketch after this list. (:issue:`2297`)
- Plugin hook: :ref:`homepage_actions() <plugin_hook_homepage_actions>` for actions that apply to the instance homepage. (:issue:`2298`)
- Plugin hook: :ref:`row_actions() <plugin_hook_row_actions>` for actions that apply to the row page. (:issue:`2299`)
- :ref:`Plugin hooks <plugin_hooks>` documentation page is now organized with additional headings. (:issue:`2300`)
- Improved the display of action buttons on pages that also display metadata. (:issue:`2286`)
- The header and footer of the page now use a subtle gradient effect, and options in the navigation menu are better visually defined. (:issue:`2302`)
- Table names that start with an underscore now default to hidden. (:issue:`2104`)
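
A minimal sketch of what a ``view_actions()`` implementation might look like (the link target shown is hypothetical):

.. code-block:: python

    from datasette import hookimpl

    @hookimpl
    def view_actions(datasette, actor, database, view):
        # Return a list of menu links for this SQL view
        return [
            {
                "href": "/-/docs/views/{}".format(view),  # hypothetical page
                "label": "Documentation for this view",
            }
        ]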

.. _v1_0_a12:

1.0a12 (2024-02-29)
-------------------

- New :ref:`query_actions() <plugin_hook_query_actions>` plugin hook, similar to :ref:`table_actions() <plugin_hook_table_actions>` and :ref:`database_actions() <plugin_hook_database_actions>`. Can be used to add a menu of actions to the canned query or arbitrary SQL query page. (:issue:`2283`)
- New design for the button that opens the query, table and database actions menu. (:issue:`2281`)
- "does not contain" table filter for finding rows that do not contain a string. (:issue:`2287`)
- Fixed a bug in the :ref:`javascript_plugins_makeColumnActions` JavaScript plugin mechanism where the column action menu was not fully reset in between each interaction. (:issue:`2289`)

.. _v1_0_a11:

1.0a11 (2024-02-19)
-------------------

- The ``"replace": true`` argument to the ``/db/table/-/insert`` API now requires the actor to have the ``update-row`` permission. (:issue:`2279`)
- Fixed some UI bugs in the interactive permissions debugging tool. (:issue:`2278`)
- The column action menu now aligns better with the cog icon, and positions itself taking into account the width of the browser window. (:issue:`2263`)

.. _v1_0_a10:

1.0a10 (2024-02-17)
-------------------

The only changes in this alpha correspond to the way Datasette handles database transactions. (:issue:`2277`)

- The :ref:`database.execute_write_fn() <database_execute_write_fn>` method has a new ``transaction=True`` parameter. This defaults to ``True``, which means all functions executed using this method are now automatically wrapped in a transaction - previously the functions needed to roll their own transaction handling, and many did not.
- Pass ``transaction=False`` to ``execute_write_fn()`` if you want to manually handle transactions in your function - see the sketch after this list.
- Several internal Datasette features, including parts of the :ref:`JSON write API <json_api_write>`, had been failing to wrap their operations in a transaction. This has been fixed by the new ``transaction=True`` default.
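
A minimal sketch of opting out of the automatic transaction, assuming ``db`` is a database object obtained from the ``datasette`` instance:

.. code-block:: python

    def write_rows(conn):
        # Manage the transaction manually inside the function
        with conn:
            conn.execute("insert into logs (message) values (?)", ["hello"])

    await db.execute_write_fn(write_rows, transaction=False)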

.. _v1_0_a9:

1.0a9 (2024-02-16)
------------------

This alpha release adds basic alter table support to the Datasette Write API and fixes a permissions bug relating to the ``/upsert`` API endpoint.

Alter table support for create, insert, upsert and update
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

The :ref:`JSON write API <json_api_write>` can now be used to apply simple alter table schema changes, provided the acting actor has the new :ref:`permissions_alter_table` permission. (:issue:`2101`)

The only alter operation supported so far is adding new columns to an existing table.

* The :ref:`/db/-/create <TableCreateView>` API now adds new columns during large operations to create a table based on incoming example ``"rows"``, in the case where one of the later rows includes columns that were not present in the earlier batches. This requires the ``create-table`` but not the ``alter-table`` permission.
* When ``/db/-/create`` is called with rows in a situation where the table may have been already created, an ``"alter": true`` key can be included to indicate that any missing columns from the new rows should be added to the table. This requires the ``alter-table`` permission.
* :ref:`/db/table/-/insert <TableInsertView>` and :ref:`/db/table/-/upsert <TableUpsertView>` and :ref:`/db/table/row-pks/-/update <RowUpdateView>` all now also accept ``"alter": true``, depending on the ``alter-table`` permission.

Operations that alter a table now fire the new :ref:`alter-table event <events>`.
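
As an illustrative sketch (the database, table and column names here are hypothetical), an insert that introduces a column not yet present in the table might look like this:

.. code-block:: bash

    curl -X POST http://localhost:8001/data/docs/-/insert \
      -H "Authorization: Bearer $DATASETTE_API_TOKEN" \
      -H "Content-Type: application/json" \
      -d '{"rows": [{"id": 3, "brand_new_column": "value"}], "alter": true}'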

Permissions fix for the upsert API
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

The :ref:`/database/table/-/upsert API <TableUpsertView>` had a minor permissions bug, only affecting Datasette instances that had configured the ``insert-row`` and ``update-row`` permissions to apply to a specific table rather than the database or instance as a whole. Full details in issue :issue:`2262`.

To avoid similar mistakes in the future the :ref:`datasette.permission_allowed() <datasette_permission_allowed>` method now specifies ``default=`` as a keyword-only argument.

Permission checks now consider opinions from every plugin
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

The :ref:`datasette.permission_allowed() <datasette_permission_allowed>` method previously consulted every plugin that implemented the :ref:`permission_allowed() <plugin_hook_permission_allowed>` plugin hook and obeyed the opinion of the last plugin to return a value. (:issue:`2275`)

Datasette now consults every plugin and checks to see if any of them returned ``False`` (the veto rule), and if none of them did, it then checks to see if any of them returned ``True``.

This is explained at length in the new documentation covering :ref:`authentication_permissions_explained`.

Other changes
~~~~~~~~~~~~~

- The new :ref:`DATASETTE_TRACE_PLUGINS=1 environment variable <writing_plugins_tracing>` turns on detailed trace output for every executed plugin hook, useful for debugging and understanding how the plugin system works at a low level. (:issue:`2274`)
- Datasette on Python 3.9 or above marks its non-cryptographic uses of the MD5 hash function as ``usedforsecurity=False``, for compatibility with FIPS systems. (:issue:`2270`)
- SQL relating to :ref:`internals_internal` now executes inside a transaction, avoiding a potential database locked error. (:issue:`2273`)
- The ``/-/threads`` debug page now identifies the database in the name associated with each dedicated write thread. (:issue:`2265`)
- The ``/db/-/create`` API now fires an ``insert-rows`` event if rows were inserted after the table was created. (:issue:`2260`)

.. _v1_0_a8:

1.0a8 (2024-02-07)
------------------

This alpha release continues the migration of Datasette's configuration from ``metadata.yaml`` to the new ``datasette.yaml`` configuration file, introduces a new system for JavaScript plugins and adds several new plugin hooks.

See `Datasette 1.0a8: JavaScript plugins, new plugin hooks and plugin configuration in datasette.yaml <https://simonwillison.net/2024/Feb/7/datasette-1a8/>`__ for an annotated version of these release notes.

Configuration
~~~~~~~~~~~~~

- Plugin configuration now lives in the :ref:`datasette.yaml configuration file <configuration>`, passed to Datasette using the ``-c/--config`` option. Thanks, Alex Garcia. (:issue:`2093`)

.. code-block:: bash

    datasette -c datasette.yaml

Where ``datasette.yaml`` contains configuration that looks like this:

.. code-block:: yaml

    plugins:
      datasette-cluster-map:
        latitude_column: xlat
        longitude_column: xlon

Previously plugins were configured in ``metadata.yaml``, which was confusing as plugin settings were unrelated to database and table metadata.

- The ``-s/--setting`` option can now be used to set plugin configuration as well. See :ref:`configuration_cli` for details. (:issue:`2252`)

The above YAML configuration example using ``-s/--setting`` looks like this:

.. code-block:: bash

    datasette mydatabase.db \
      -s plugins.datasette-cluster-map.latitude_column xlat \
      -s plugins.datasette-cluster-map.longitude_column xlon

- The new ``/-/config`` page shows the current instance configuration, after redacting keys that could contain sensitive data such as API keys or passwords. (:issue:`2254`)

- Existing Datasette installations may already have configuration set in ``metadata.yaml`` that should be migrated to ``datasette.yaml``. To avoid breaking these installations, Datasette will silently treat table configuration, plugin configuration and allow blocks in metadata as if they had been specified in configuration instead. (:issue:`2247`) (:issue:`2248`) (:issue:`2249`)

Note that the ``datasette publish`` command has not yet been updated to accept a ``datasette.yaml`` configuration file. This will be addressed in :issue:`2195` but for the moment you can include those settings in ``metadata.yaml`` instead.

JavaScript plugins
~~~~~~~~~~~~~~~~~~

Datasette now includes a :ref:`JavaScript plugins mechanism <javascript_plugins>`, allowing JavaScript to customize Datasette in a way that can collaborate with other plugins.

This provides two initial hooks, with more to come in the future:

- :ref:`makeAboveTablePanelConfigs() <javascript_plugins_makeAboveTablePanelConfigs>` can add additional panels to the top of the table page.
- :ref:`makeColumnActions() <javascript_plugins_makeColumnActions>` can add additional actions to the column menu.

Thanks `Cameron Yick <https://github.com/hydrosquall>`__ for contributing this feature. (`#2052 <https://github.com/simonw/datasette/pull/2052>`__)

Plugin hooks
~~~~~~~~~~~~

- New :ref:`plugin_hook_jinja2_environment_from_request` plugin hook, which can be used to customize the current Jinja environment based on the incoming request. This can be used to modify the template lookup path based on the incoming request hostname, among other things. (:issue:`2225`)
- New :ref:`family of template slot plugin hooks <plugin_hook_slots>`: ``top_homepage``, ``top_database``, ``top_table``, ``top_row``, ``top_query``, ``top_canned_query``. Plugins can use these to provide additional HTML to be injected at the top of the corresponding pages. (:issue:`1191`)
- New :ref:`track_event() mechanism <plugin_event_tracking>` for plugins to emit and receive events when certain events occur within Datasette. (:issue:`2240`)
- Plugins can register additional event classes using :ref:`plugin_hook_register_events`.
- They can then trigger those events with the :ref:`datasette.track_event(event) <datasette_track_event>` internal method.
- Plugins can subscribe to notifications of events using the :ref:`plugin_hook_track_event` plugin hook - see the sketch after this list.
- Datasette core now emits ``login``, ``logout``, ``create-token``, ``create-table``, ``drop-table``, ``insert-rows``, ``upsert-rows``, ``update-row``, ``delete-row`` events, :ref:`documented here <events>`.
- New internal function for plugin authors: :ref:`database_execute_isolated_fn`, for creating a new SQLite connection, executing code and then closing that connection, all while preventing other code from writing to that particular database. This connection will not have the :ref:`prepare_connection() <plugin_hook_prepare_connection>` plugin hook executed against it, allowing plugins to perform actions that might otherwise be blocked by existing connection configuration. (:issue:`2218`)
|
||||
|
||||
Documentation
|
||||
~~~~~~~~~~~~~
|
||||
|
||||
- Documentation describing :ref:`how to write tests that use signed actor cookies <testing_datasette_client>` using ``datasette.client.actor_cookie()``. (:issue:`1830`)
|
||||
- Documentation on how to :ref:`register a plugin for the duration of a test <testing_plugins_register_in_test>`. (:issue:`2234`)
|
||||
- The :ref:`configuration documentation <configuration>` now shows examples of both YAML and JSON for each setting.
|
||||
|
||||
Minor fixes
|
||||
~~~~~~~~~~~
|
||||
|
||||
- Datasette no longer attempts to run SQL queries in parallel when rendering a table page, as this was leading to some rare crashing bugs. (:issue:`2189`)
|
||||
- Fixed warning: ``DeprecationWarning: pkg_resources is deprecated as an API`` (:issue:`2057`)
|
||||
- Fixed bug where ``?_extra=columns`` parameter returned an incorrectly shaped response. (:issue:`2230`)
|
||||
|
||||
.. _v0_64_6:
|
||||
|
||||
0.64.6 (2023-12-22)
|
||||
-------------------
|
||||
|
||||
- Fixed a bug where CSV export with expanded labels could fail if a foreign key reference did not correctly resolve. (:issue:`2214`)
|
||||
|
||||
.. _v0_64_5:
|
||||
|
||||
0.64.5 (2023-10-08)
|
||||
-------------------
|
||||
|
||||
- Dropped dependency on ``click-default-group-wheel``, which could cause a dependency conflict. (:issue:`2197`)
|
||||
|
||||
.. _v1_0_a7:
|
||||
|
||||
1.0a7 (2023-09-21)
|
||||
------------------
|
||||
|
||||
- Fix for a crashing bug caused by viewing the table page for a named in-memory database. (:issue:`2189`)
|
||||
|
||||
.. _v0_64_4:
|
||||
|
||||
0.64.4 (2023-09-21)
|
||||
-------------------
|
||||
|
||||
- Fix for a crashing bug caused by viewing the table page for a named in-memory database. (:issue:`2189`)
|
||||
|
||||
.. _v1_0_a6:
|
||||
|
||||
1.0a6 (2023-09-07)
|
||||
------------------
|
||||
|
||||
- New plugin hook: :ref:`plugin_hook_actors_from_ids` and an internal method to accompany it, :ref:`datasette_actors_from_ids`. This mechanism is intended to be used by plugins that may need to display the actor who was responsible for something managed by that plugin: they can now resolve the recorded IDs of actors into the full actor objects. (:issue:`2181`)
|
||||
- ``DATASETTE_LOAD_PLUGINS`` environment variable for :ref:`controlling which plugins <plugins_datasette_load_plugins>` are loaded by Datasette. (:issue:`2164`)
|
||||
- Datasette now checks if the user has permission to view a table linked to by a foreign key before turning that foreign key into a clickable link. (:issue:`2178`)
|
||||
- The ``execute-sql`` permission now implies that the actor can also view the database and instance. (:issue:`2169`)
|
||||
- Documentation describing a pattern for building plugins that themselves :ref:`define further hooks <writing_plugins_extra_hooks>` for other plugins. (:issue:`1765`)
|
||||
- Datasette is now tested against the Python 3.12 preview. (`#2175 <https://github.com/simonw/datasette/pull/2175>`__)
|
||||
|
||||
.. _v1_0_a5:
|
||||
|
||||
1.0a5 (2023-08-29)
|
||||
------------------
|
||||
|
||||
- When restrictions are applied to :ref:`API tokens <CreateTokenView>`, those restrictions now behave slightly differently: applying the ``view-table`` restriction will imply the ability to ``view-database`` for the database containing that table, and both ``view-table`` and ``view-database`` will imply ``view-instance``. Previously you needed to create a token with restrictions that explicitly listed ``view-instance`` and ``view-database`` and ``view-table`` in order to view a table without getting a permission denied error. (:issue:`2102`)
|
||||
- New ``datasette.yaml`` (or ``.json``) configuration file, which can be specified using ``datasette -c path-to-file``. The goal here is to consolidate settings, plugin configuration, permissions, canned queries, and other Datasette configuration into a single file, separate from ``metadata.yaml``. The legacy ``settings.json`` config file used for :ref:`config_dir` has been removed, and ``datasette.yaml`` has a ``"settings"`` section where the same settings key/value pairs can be included. In the next alpha release, more configuration such as plugins/permissions/canned queries will be moved to the ``datasette.yaml`` file. See :issue:`2093` for more details. Thanks, Alex Garcia.
|
||||
- The ``-s/--setting`` option can now take dotted paths to nested settings. These will then be used to set or over-ride the same options as are present in the new configuration file. (:issue:`2156`)
|
||||
- New ``--actor '{"id": "json-goes-here"}'`` option for use with ``datasette --get`` to treat the simulated request as being made by a specific actor, see :ref:`cli_datasette_get`. (:issue:`2153`)
|
||||
- The Datasette ``_internal`` database has had some changes. It no longer shows up in the ``datasette.databases`` list by default, and is now instead available to plugins using the ``datasette.get_internal_database()`` method. Plugins are invited to use this as a private database to store configuration, settings and secrets that should not be made visible through the default Datasette interface. Users can pass the new ``--internal internal.db`` option to persist that internal database to disk. Thanks, Alex Garcia. (:issue:`2157`).
|
||||
|
||||
.. _v1_0_a4:
|
||||
|
||||
1.0a4 (2023-08-21)
|
||||
------------------
|
||||
|
||||
This alpha fixes a security issue with the ``/-/api`` API explorer. On authenticated Datasette instances (instances protected using plugins such as `datasette-auth-passwords <https://datasette.io/plugins/datasette-auth-passwords>`__) the API explorer interface could reveal the names of databases and tables within the protected instance. The data stored in those tables was not revealed.
|
||||
|
||||
For more information and workarounds, read `the security advisory <https://github.com/simonw/datasette/security/advisories/GHSA-7ch3-7pp7-7cpq>`__. The issue has been present in every previous alpha version of Datasette 1.0: versions 1.0a0, 1.0a1, 1.0a2 and 1.0a3.
|
||||
|
||||
Also in this alpha:
|
||||
|
||||
- The new ``datasette plugins --requirements`` option outputs a list of currently installed plugins in Python ``requirements.txt`` format, useful for duplicating that installation elsewhere - see the sketch after this list. (:issue:`2133`)
|
||||
- :ref:`canned_queries_writable` can now define an ``on_success_message_sql`` field in their configuration, containing a SQL query that should be executed upon successful completion of the write operation in order to generate a message to be shown to the user. (:issue:`2138`)
|
||||
- The automatically generated border color for a database is now shown in more places around the application. (:issue:`2119`)
|
||||
- Every instance of example shell script code in the documentation should now include a working copy button, free from additional syntax. (:issue:`2140`)
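As a quick sketch of the ``--requirements`` workflow (the file name here is illustrative):

.. code-block:: bash

    # Capture the current plugin set in requirements.txt format...
    datasette plugins --requirements > plugins.txt
    # ...then reproduce the same installation in another environment
    pip install -r plugins.txt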
|
||||
|
||||
.. _v1_0_a3:
|
||||
|
||||
1.0a3 (2023-08-09)
|
||||
|
@ -213,7 +452,7 @@ Documentation
|
|||
|
||||
Datasette can now run entirely in your browser using WebAssembly. Try out `Datasette Lite <https://lite.datasette.io/>`__, take a look `at the code <https://github.com/simonw/datasette-lite>`__ or read more about it in `Datasette Lite: a server-side Python web application running in a browser <https://simonwillison.net/2022/May/4/datasette-lite/>`__.
|
||||
|
||||
Datasette now has a `Discord community <https://datasette.io/discord>`__ for questions and discussions about Datasette and its ecosystem of projects.
|
||||
|
||||
Features
|
||||
~~~~~~~~
|
||||
|
@ -543,7 +782,7 @@ JavaScript modules
|
|||
|
||||
To use modules, JavaScript needs to be included in ``<script>`` tags with a ``type="module"`` attribute.
|
||||
|
||||
Datasette now has the ability to output ``<script type="module">`` in places where you may wish to take advantage of modules. The ``extra_js_urls`` option described in :ref:`configuration_reference_css_js` can now be used with modules, and module support is also available for the :ref:`extra_body_script() <plugin_hook_extra_body_script>` plugin hook. (:issue:`1186`, :issue:`1187`)
|
||||
|
||||
`datasette-leaflet-freedraw <https://datasette.io/plugins/datasette-leaflet-freedraw>`__ is the first example of a Datasette plugin that takes advantage of the new support for JavaScript modules. See `Drawing shapes on a map to query a SpatiaLite database <https://simonwillison.net/2021/Jan/24/drawing-shapes-spatialite/>`__ for more on this plugin.
|
||||
|
||||
|
@ -924,7 +1163,10 @@ Prior to this release the Datasette ecosystem has treated authentication as excl
|
|||
|
||||
You'll need to install plugins if you want full user accounts, but default Datasette can now authenticate a single root user with the new ``--root`` command-line option, which outputs a one-time use URL to :ref:`authenticate as a root actor <authentication_root>` (:issue:`784`)::
|
||||
|
||||
datasette fixtures.db --root
|
||||
|
||||
::
|
||||
|
||||
http://127.0.0.1:8001/-/auth-token?token=5b632f8cd44b868df625f5a6e2185d88eea5b22237fd3cc8773f107cc4fd6477
|
||||
INFO: Started server process [14973]
|
||||
INFO: Waiting for application startup.
|
||||
|
@ -1095,7 +1337,7 @@ You can now create :ref:`custom pages <custom_pages>` within your Datasette inst
|
|||
|
||||
:ref:`config_dir` (:issue:`731`) allows you to define a custom Datasette instance as a directory. So instead of running the following::
|
||||
|
||||
datasette one.db two.db \
|
||||
--metadata=metadata.json \
|
||||
--template-dir=templates/ \
|
||||
--plugins-dir=plugins \
|
||||
|
@ -1103,7 +1345,7 @@ You can now create :ref:`custom pages <custom_pages>` within your Datasette inst
|
|||
|
||||
You can instead arrange your files in a single directory called ``my-project`` and run this::
|
||||
|
||||
datasette my-project/
|
||||
|
||||
Also in this release:
|
||||
|
||||
|
@ -1120,7 +1362,7 @@ Also in this release:
|
|||
0.40 (2020-04-21)
|
||||
-----------------
|
||||
|
||||
* Datasette :ref:`metadata` can now be provided as a YAML file as an optional alternative to JSON. (:issue:`713`)
|
||||
* Removed support for ``datasette publish now``, which used the now-retired Zeit Now v1 hosting platform. A new plugin, `datasette-publish-now <https://github.com/simonw/datasette-publish-now>`__, can be installed to publish data to Zeit (`now Vercel <https://vercel.com/blog/zeit-is-now-vercel>`__) Now v2. (:issue:`710`)
|
||||
* Fixed a bug where the ``extra_template_vars(request, view_name)`` plugin hook was not receiving the correct ``view_name``. (:issue:`716`)
|
||||
* Variables added to the template context by the ``extra_template_vars()`` plugin hook are now shown in the ``?_context=1`` debugging mode (see :ref:`setting_template_debug`). (:issue:`693`)
|
||||
|
@ -1775,7 +2017,10 @@ In addition to the work on facets:
|
|||
|
||||
Added new help section::
|
||||
|
||||
datasette --help-config
|
||||
|
||||
::
|
||||
|
||||
Config options:
|
||||
default_page_size Default page size for the table view
|
||||
(default=100)
|
||||
|
|
|
@ -112,10 +112,9 @@ Once started you can access it at ``http://localhost:8001``
|
|||
--static MOUNT:DIRECTORY Serve static files from this directory at
|
||||
/MOUNT/...
|
||||
--memory Make /_memory database available
|
||||
-c, --config FILENAME Path to JSON/YAML Datasette configuration file
|
||||
-s, --setting SETTING... nested.key, value setting to use in Datasette
|
||||
configuration
|
||||
--secret TEXT Secret used for signing secure values, such as
|
||||
signed cookies
|
||||
--root Output URL that sets a cookie authenticating
|
||||
|
@ -123,6 +122,7 @@ Once started you can access it at ``http://localhost:8001``
|
|||
--get TEXT Run an HTTP GET request against this path,
|
||||
print results and exit
|
||||
--token TEXT API token to send with --get requests
|
||||
--actor TEXT Actor to use for --get requests (JSON string)
|
||||
--version-note TEXT Additional note to show on /-/versions
|
||||
--help-settings Show available settings
|
||||
--pdb Launch debugger on any errors
|
||||
|
@ -134,6 +134,8 @@ Once started you can access it at ``http://localhost:8001``
|
|||
mode
|
||||
--ssl-keyfile TEXT SSL key file
|
||||
--ssl-certfile TEXT SSL certificate file
|
||||
--internal PATH Path to a persistent Datasette internal SQLite
|
||||
database
|
||||
--help Show this message and exit.
|
||||
|
||||
|
||||
|
@ -149,9 +151,14 @@ The ``--get`` option to ``datasette serve`` (or just ``datasette``) specifies th
|
|||
|
||||
This means that all of Datasette's functionality can be accessed directly from the command-line.
|
||||
|
||||
For example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
datasette --get '/-/versions.json' | jq .
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"python": {
|
||||
"version": "3.8.5",
|
||||
|
@ -192,7 +199,13 @@ For example::
|
|||
|
||||
You can use the ``--token TOKEN`` option to send an :ref:`API token <CreateTokenView>` with the simulated request.
|
||||
|
||||
Or you can make a request as a specific actor by passing a JSON representation of that actor to ``--actor``:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
datasette --memory --actor '{"id": "root"}' --get '/-/actor.json'
|
||||
|
||||
The exit code of ``datasette --get`` will be 0 if the request succeeds and 1 if the request produced an HTTP status code other than 200 - e.g. a 404 or 500 error.
|
||||
|
||||
This lets you use ``datasette --get /`` to run tests against a Datasette application in a continuous integration environment such as GitHub Actions.
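A minimal smoke test along these lines might look like this sketch, relying on the exit code behavior described above:

.. code-block:: bash

    # Exits non-zero (failing the CI job) unless the homepage returns a 200
    datasette mydatabase.db --get / && echo "Smoke test passed"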
|
||||
|
||||
|
|
|
@ -1,2 +1,5 @@
|
|||
alls
|
||||
fo
|
||||
ro
|
||||
te
|
||||
ths
|
|
@ -40,6 +40,8 @@ extensions = [
|
|||
if not os.environ.get("DISABLE_SPHINX_INLINE_TABS"):
|
||||
extensions += ["sphinx_inline_tabs"]
|
||||
|
||||
autodoc_member_order = "bysource"
|
||||
|
||||
extlinks = {
|
||||
"issue": ("https://github.com/simonw/datasette/issues/%s", "#%s"),
|
||||
}
|
||||
|
|
|
@ -0,0 +1,638 @@
|
|||
.. _configuration:
|
||||
|
||||
Configuration
|
||||
=============
|
||||
|
||||
Datasette offers several ways to configure your Datasette instances: server settings, plugin configuration, authentication, and more.
|
||||
|
||||
Most configuration can be handled using a ``datasette.yaml`` configuration file, passed to datasette using the ``-c/--config`` flag:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
datasette mydatabase.db --config datasette.yaml
|
||||
|
||||
This file can also use JSON, as ``datasette.json``. YAML is recommended over JSON due to its support for comments and multi-line strings.
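The JSON variant is passed to Datasette in exactly the same way:

.. code-block:: bash

    datasette mydatabase.db --config datasette.json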
|
||||
|
||||
.. _configuration_cli:
|
||||
|
||||
Configuration via the command-line
|
||||
----------------------------------
|
||||
|
||||
The recommended way to configure Datasette is using a ``datasette.yaml`` file passed to ``-c/--config``. You can also pass individual settings to Datasette using the ``-s/--setting`` option, which can be used multiple times:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
datasette mydatabase.db \
|
||||
--setting settings.default_page_size 50 \
|
||||
--setting settings.sql_time_limit_ms 3500
|
||||
|
||||
This option takes dotted-notation for the first argument and a value for the second argument. This means you can use it to set any configuration value that would be valid in a ``datasette.yaml`` file.
|
||||
|
||||
It also works for plugin configuration, for example for `datasette-cluster-map <https://datasette.io/plugins/datasette-cluster-map>`_:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
datasette mydatabase.db \
|
||||
--setting plugins.datasette-cluster-map.latitude_column xlat \
|
||||
--setting plugins.datasette-cluster-map.longitude_column xlon
|
||||
|
||||
If the value you provide is a valid JSON object or list it will be treated as nested data, allowing you to configure plugins that accept lists such as `datasette-proxy-url <https://datasette.io/plugins/datasette-proxy-url>`_:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
datasette mydatabase.db \
|
||||
-s plugins.datasette-proxy-url.paths '[{"path": "/proxy", "backend": "http://example.com/"}]'
|
||||
|
||||
This is equivalent to a ``datasette.yaml`` file containing the following:
|
||||
|
||||
.. [[[cog
|
||||
from metadata_doc import config_example
|
||||
import textwrap
|
||||
config_example(cog, textwrap.dedent(
|
||||
"""
|
||||
plugins:
|
||||
datasette-proxy-url:
|
||||
paths:
|
||||
- path: /proxy
|
||||
backend: http://example.com/
|
||||
""").strip()
|
||||
)
|
||||
.. ]]]
|
||||
|
||||
.. tab:: datasette.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
plugins:
|
||||
datasette-proxy-url:
|
||||
paths:
|
||||
- path: /proxy
|
||||
backend: http://example.com/
|
||||
|
||||
.. tab:: datasette.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"plugins": {
|
||||
"datasette-proxy-url": {
|
||||
"paths": [
|
||||
{
|
||||
"path": "/proxy",
|
||||
"backend": "http://example.com/"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
.. [[[end]]]
|
||||
|
||||
.. _configuration_reference:
|
||||
|
||||
``datasette.yaml`` reference
|
||||
----------------------------
|
||||
|
||||
The following example shows some of the valid configuration options that can exist inside ``datasette.yaml``.
|
||||
|
||||
.. [[[cog
|
||||
from metadata_doc import config_example
|
||||
import textwrap
|
||||
config_example(cog, textwrap.dedent(
|
||||
"""
|
||||
# Datasette settings block
|
||||
settings:
|
||||
default_page_size: 50
|
||||
sql_time_limit_ms: 3500
|
||||
max_returned_rows: 2000
|
||||
|
||||
# top-level plugin configuration
|
||||
plugins:
|
||||
datasette-my-plugin:
|
||||
key: valueA
|
||||
|
||||
# Database and table-level configuration
|
||||
databases:
|
||||
your_db_name:
|
||||
# plugin configuration for the your_db_name database
|
||||
plugins:
|
||||
datasette-my-plugin:
|
||||
key: valueA
|
||||
tables:
|
||||
your_table_name:
|
||||
allow:
|
||||
# Only the root user can access this table
|
||||
id: root
|
||||
# plugin configuration for the your_table_name table
|
||||
# inside your_db_name database
|
||||
plugins:
|
||||
datasette-my-plugin:
|
||||
key: valueB
|
||||
""")
|
||||
)
|
||||
.. ]]]
|
||||
|
||||
.. tab:: datasette.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
|
||||
# Datasette settings block
|
||||
settings:
|
||||
default_page_size: 50
|
||||
sql_time_limit_ms: 3500
|
||||
max_returned_rows: 2000
|
||||
|
||||
# top-level plugin configuration
|
||||
plugins:
|
||||
datasette-my-plugin:
|
||||
key: valueA
|
||||
|
||||
# Database and table-level configuration
|
||||
databases:
|
||||
your_db_name:
|
||||
# plugin configuration for the your_db_name database
|
||||
plugins:
|
||||
datasette-my-plugin:
|
||||
key: valueA
|
||||
tables:
|
||||
your_table_name:
|
||||
allow:
|
||||
# Only the root user can access this table
|
||||
id: root
|
||||
# plugin configuration for the your_table_name table
|
||||
# inside your_db_name database
|
||||
plugins:
|
||||
datasette-my-plugin:
|
||||
key: valueB
|
||||
|
||||
|
||||
.. tab:: datasette.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"settings": {
|
||||
"default_page_size": 50,
|
||||
"sql_time_limit_ms": 3500,
|
||||
"max_returned_rows": 2000
|
||||
},
|
||||
"plugins": {
|
||||
"datasette-my-plugin": {
|
||||
"key": "valueA"
|
||||
}
|
||||
},
|
||||
"databases": {
|
||||
"your_db_name": {
|
||||
"plugins": {
|
||||
"datasette-my-plugin": {
|
||||
"key": "valueA"
|
||||
}
|
||||
},
|
||||
"tables": {
|
||||
"your_table_name": {
|
||||
"allow": {
|
||||
"id": "root"
|
||||
},
|
||||
"plugins": {
|
||||
"datasette-my-plugin": {
|
||||
"key": "valueB"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
.. [[[end]]]
|
||||
|
||||
.. _configuration_reference_settings:
|
||||
|
||||
Settings
|
||||
~~~~~~~~
|
||||
|
||||
:ref:`settings` can be configured in ``datasette.yaml`` with the ``settings`` key:
|
||||
|
||||
.. [[[cog
|
||||
from metadata_doc import config_example
|
||||
import textwrap
|
||||
config_example(cog, textwrap.dedent(
|
||||
"""
|
||||
# inside datasette.yaml
|
||||
settings:
|
||||
default_allow_sql: off
|
||||
default_page_size: 50
|
||||
""").strip()
|
||||
)
|
||||
.. ]]]
|
||||
|
||||
.. tab:: datasette.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
# inside datasette.yaml
|
||||
settings:
|
||||
default_allow_sql: off
|
||||
default_page_size: 50
|
||||
|
||||
.. tab:: datasette.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"settings": {
|
||||
"default_allow_sql": "off",
|
||||
"default_page_size": 50
|
||||
}
|
||||
}
|
||||
.. [[[end]]]
|
||||
|
||||
The full list of settings is available in the :ref:`settings documentation <settings>`. Settings can also be passed to Datasette using one or more ``--setting name value`` command line options.
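As a sketch, the YAML example above has this command-line equivalent, using the dotted ``settings.`` prefix described in :ref:`configuration_cli`:

.. code-block:: bash

    datasette mydatabase.db \
      -s settings.default_allow_sql off \
      -s settings.default_page_size 50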
|
||||
|
||||
.. _configuration_reference_plugins:
|
||||
|
||||
Plugin configuration
|
||||
~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
:ref:`Datasette plugins <plugins>` often require configuration. This plugin configuration should be placed in ``plugins`` keys inside ``datasette.yaml``.
|
||||
|
||||
Most plugins are configured at the top-level of the file, using the ``plugins`` key:
|
||||
|
||||
.. [[[cog
|
||||
from metadata_doc import config_example
|
||||
import textwrap
|
||||
config_example(cog, textwrap.dedent(
|
||||
"""
|
||||
# inside datasette.yaml
|
||||
plugins:
|
||||
datasette-my-plugin:
|
||||
key: my_value
|
||||
""").strip()
|
||||
)
|
||||
.. ]]]
|
||||
|
||||
.. tab:: datasette.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
# inside datasette.yaml
|
||||
plugins:
|
||||
datasette-my-plugin:
|
||||
key: my_value
|
||||
|
||||
.. tab:: datasette.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"plugins": {
|
||||
"datasette-my-plugin": {
|
||||
"key": "my_value"
|
||||
}
|
||||
}
|
||||
}
|
||||
.. [[[end]]]
|
||||
|
||||
Some plugins can be configured at the database or table level. These should use a ``plugins`` key nested under the appropriate place within the ``databases`` object:
|
||||
|
||||
.. [[[cog
|
||||
from metadata_doc import config_example
|
||||
import textwrap
|
||||
config_example(cog, textwrap.dedent(
|
||||
"""
|
||||
# inside datasette.yaml
|
||||
databases:
|
||||
my_database:
|
||||
# plugin configuration for the my_database database
|
||||
plugins:
|
||||
datasette-my-plugin:
|
||||
key: my_value
|
||||
my_other_database:
|
||||
tables:
|
||||
my_table:
|
||||
# plugin configuration for the my_table table inside the my_other_database database
|
||||
plugins:
|
||||
datasette-my-plugin:
|
||||
key: my_value
|
||||
""").strip()
|
||||
)
|
||||
.. ]]]
|
||||
|
||||
.. tab:: datasette.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
# inside datasette.yaml
|
||||
databases:
|
||||
my_database:
|
||||
# plugin configuration for the my_database database
|
||||
plugins:
|
||||
datasette-my-plugin:
|
||||
key: my_value
|
||||
my_other_database:
|
||||
tables:
|
||||
my_table:
|
||||
# plugin configuration for the my_table table inside the my_other_database database
|
||||
plugins:
|
||||
datasette-my-plugin:
|
||||
key: my_value
|
||||
|
||||
.. tab:: datasette.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"databases": {
|
||||
"my_database": {
|
||||
"plugins": {
|
||||
"datasette-my-plugin": {
|
||||
"key": "my_value"
|
||||
}
|
||||
}
|
||||
},
|
||||
"my_other_database": {
|
||||
"tables": {
|
||||
"my_table": {
|
||||
"plugins": {
|
||||
"datasette-my-plugin": {
|
||||
"key": "my_value"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
.. [[[end]]]
|
||||
|
||||
|
||||
.. _configuration_reference_permissions:
|
||||
|
||||
Permissions configuration
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Datasette's :ref:`authentication and permissions <authentication>` system can also be configured using ``datasette.yaml``.
|
||||
|
||||
Here is a simple example:
|
||||
|
||||
.. [[[cog
|
||||
from metadata_doc import config_example
|
||||
import textwrap
|
||||
config_example(cog, textwrap.dedent(
|
||||
"""
|
||||
# Instance is only available to users 'sharon' and 'percy':
|
||||
allow:
|
||||
id:
|
||||
- sharon
|
||||
- percy
|
||||
|
||||
# Only 'percy' is allowed access to the accounting database:
|
||||
databases:
|
||||
accounting:
|
||||
allow:
|
||||
id: percy
|
||||
""").strip()
|
||||
)
|
||||
.. ]]]
|
||||
|
||||
.. tab:: datasette.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
# Instance is only available to users 'sharon' and 'percy':
|
||||
allow:
|
||||
id:
|
||||
- sharon
|
||||
- percy
|
||||
|
||||
# Only 'percy' is allowed access to the accounting database:
|
||||
databases:
|
||||
accounting:
|
||||
allow:
|
||||
id: percy
|
||||
|
||||
.. tab:: datasette.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"allow": {
|
||||
"id": [
|
||||
"sharon",
|
||||
"percy"
|
||||
]
|
||||
},
|
||||
"databases": {
|
||||
"accounting": {
|
||||
"allow": {
|
||||
"id": "percy"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
.. [[[end]]]
|
||||
|
||||
:ref:`authentication_permissions_config` has the full details.
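One way to sanity-check an allow block like the one above is to combine the ``--actor`` and ``--get`` options - a sketch, assuming the configuration is saved as ``datasette.yaml``:

.. code-block:: bash

    # Exit code 0: percy is allowed to view the accounting database
    datasette accounting.db -c datasette.yaml \
      --actor '{"id": "percy"}' --get /accounting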
|
||||
|
||||
.. _configuration_reference_canned_queries:
|
||||
|
||||
Canned queries configuration
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
:ref:`Canned queries <canned_queries>` are named SQL queries that appear in the Datasette interface. They can be configured in ``datasette.yaml`` using the ``queries`` key at the database level:
|
||||
|
||||
.. [[[cog
|
||||
from metadata_doc import config_example, config_example
|
||||
config_example(cog, {
|
||||
"databases": {
|
||||
"sf-trees": {
|
||||
"queries": {
|
||||
"just_species": {
|
||||
"sql": "select qSpecies from Street_Tree_List"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
.. ]]]
|
||||
|
||||
.. tab:: datasette.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
databases:
|
||||
sf-trees:
|
||||
queries:
|
||||
just_species:
|
||||
sql: select qSpecies from Street_Tree_List
|
||||
|
||||
|
||||
.. tab:: datasette.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"databases": {
|
||||
"sf-trees": {
|
||||
"queries": {
|
||||
"just_species": {
|
||||
"sql": "select qSpecies from Street_Tree_List"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
.. [[[end]]]
|
||||
|
||||
See the :ref:`canned queries documentation <canned_queries>` for more, including how to configure :ref:`writable canned queries <canned_queries_writable>`.
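Assuming the configuration above is saved as ``datasette.yaml``, the query will be served from ``/sf-trees/just_species`` - for example:

.. code-block:: bash

    datasette sf-trees.db -c datasette.yaml \
      --get /sf-trees/just_species.json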
|
||||
|
||||
.. _configuration_reference_css_js:
|
||||
|
||||
Custom CSS and JavaScript
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Datasette can load additional CSS and JavaScript files, configured in ``datasette.yaml`` like this:
|
||||
|
||||
.. [[[cog
|
||||
from metadata_doc import config_example
|
||||
config_example(cog, """
|
||||
extra_css_urls:
|
||||
- https://simonwillison.net/static/css/all.bf8cd891642c.css
|
||||
extra_js_urls:
|
||||
- https://code.jquery.com/jquery-3.2.1.slim.min.js
|
||||
""")
|
||||
.. ]]]
|
||||
|
||||
.. tab:: datasette.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
|
||||
extra_css_urls:
|
||||
- https://simonwillison.net/static/css/all.bf8cd891642c.css
|
||||
extra_js_urls:
|
||||
- https://code.jquery.com/jquery-3.2.1.slim.min.js
|
||||
|
||||
|
||||
.. tab:: datasette.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"extra_css_urls": [
|
||||
"https://simonwillison.net/static/css/all.bf8cd891642c.css"
|
||||
],
|
||||
"extra_js_urls": [
|
||||
"https://code.jquery.com/jquery-3.2.1.slim.min.js"
|
||||
]
|
||||
}
|
||||
.. [[[end]]]
|
||||
|
||||
The extra CSS and JavaScript files will be linked in the ``<head>`` of every page:
|
||||
|
||||
.. code-block:: html
|
||||
|
||||
<link rel="stylesheet" href="https://simonwillison.net/static/css/all.bf8cd891642c.css">
|
||||
<script src="https://code.jquery.com/jquery-3.2.1.slim.min.js"></script>
|
||||
|
||||
You can also specify a SRI (subresource integrity hash) for these assets:
|
||||
|
||||
.. [[[cog
|
||||
config_example(cog, """
|
||||
extra_css_urls:
|
||||
- url: https://simonwillison.net/static/css/all.bf8cd891642c.css
|
||||
sri: sha384-9qIZekWUyjCyDIf2YK1FRoKiPJq4PHt6tp/ulnuuyRBvazd0hG7pWbE99zvwSznI
|
||||
extra_js_urls:
|
||||
- url: https://code.jquery.com/jquery-3.2.1.slim.min.js
|
||||
sri: sha256-k2WSCIexGzOj3Euiig+TlR8gA0EmPjuc79OEeY5L45g=
|
||||
""")
|
||||
.. ]]]
|
||||
|
||||
.. tab:: datasette.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
|
||||
extra_css_urls:
|
||||
- url: https://simonwillison.net/static/css/all.bf8cd891642c.css
|
||||
sri: sha384-9qIZekWUyjCyDIf2YK1FRoKiPJq4PHt6tp/ulnuuyRBvazd0hG7pWbE99zvwSznI
|
||||
extra_js_urls:
|
||||
- url: https://code.jquery.com/jquery-3.2.1.slim.min.js
|
||||
sri: sha256-k2WSCIexGzOj3Euiig+TlR8gA0EmPjuc79OEeY5L45g=
|
||||
|
||||
|
||||
.. tab:: datasette.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"extra_css_urls": [
|
||||
{
|
||||
"url": "https://simonwillison.net/static/css/all.bf8cd891642c.css",
|
||||
"sri": "sha384-9qIZekWUyjCyDIf2YK1FRoKiPJq4PHt6tp/ulnuuyRBvazd0hG7pWbE99zvwSznI"
|
||||
}
|
||||
],
|
||||
"extra_js_urls": [
|
||||
{
|
||||
"url": "https://code.jquery.com/jquery-3.2.1.slim.min.js",
|
||||
"sri": "sha256-k2WSCIexGzOj3Euiig+TlR8gA0EmPjuc79OEeY5L45g="
|
||||
}
|
||||
]
|
||||
}
|
||||
.. [[[end]]]
|
||||
|
||||
This will produce:
|
||||
|
||||
.. code-block:: html
|
||||
|
||||
<link rel="stylesheet" href="https://simonwillison.net/static/css/all.bf8cd891642c.css"
|
||||
integrity="sha384-9qIZekWUyjCyDIf2YK1FRoKiPJq4PHt6tp/ulnuuyRBvazd0hG7pWbE99zvwSznI"
|
||||
crossorigin="anonymous">
|
||||
<script src="https://code.jquery.com/jquery-3.2.1.slim.min.js"
|
||||
integrity="sha256-k2WSCIexGzOj3Euiig+TlR8gA0EmPjuc79OEeY5L45g="
|
||||
crossorigin="anonymous"></script>
|
||||
|
||||
Modern browsers will only execute the stylesheet or JavaScript if the SRI hash
|
||||
matches the content served. You can generate hashes using `www.srihash.org <https://www.srihash.org/>`_.
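If you prefer to generate hashes locally, this ``openssl`` sketch produces the same digest as the ``sha256-`` example above (prepend the algorithm name to the output yourself):

.. code-block:: bash

    curl -s https://code.jquery.com/jquery-3.2.1.slim.min.js \
      | openssl dgst -sha256 -binary | openssl base64 -A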
|
||||
|
||||
Items in ``"extra_js_urls"`` can specify ``"module": true`` if they reference JavaScript that uses `JavaScript modules <https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Modules>`__. This configuration:
|
||||
|
||||
.. [[[cog
|
||||
config_example(cog, """
|
||||
extra_js_urls:
|
||||
- url: https://example.datasette.io/module.js
|
||||
module: true
|
||||
""")
|
||||
.. ]]]
|
||||
|
||||
.. tab:: datasette.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
|
||||
extra_js_urls:
|
||||
- url: https://example.datasette.io/module.js
|
||||
module: true
|
||||
|
||||
|
||||
.. tab:: datasette.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"extra_js_urls": [
|
||||
{
|
||||
"url": "https://example.datasette.io/module.js",
|
||||
"module": true
|
||||
}
|
||||
]
|
||||
}
|
||||
.. [[[end]]]
|
||||
|
||||
Will produce this HTML:
|
||||
|
||||
.. code-block:: html
|
||||
|
||||
<script type="module" src="https://example.datasette.io/module.js"></script>
|
||||
|
||||
|
||||
|
|
@ -133,13 +133,19 @@ Running Black
|
|||
|
||||
Black will be installed when you run ``pip install -e '.[test]'``. To test that your code complies with Black, run the following in your root ``datasette`` repository checkout::
|
||||
|
||||
black . --check
|
||||
|
||||
::
|
||||
|
||||
All done! ✨ 🍰 ✨
|
||||
95 files would be left unchanged.
|
||||
|
||||
If any of your code does not conform to Black you can run this to automatically fix those problems::
|
||||
|
||||
black .
|
||||
|
||||
::
|
||||
|
||||
reformatted ../datasette/setup.py
|
||||
All done! ✨ 🍰 ✨
|
||||
1 file reformatted, 94 files left unchanged.
|
||||
|
@ -160,11 +166,14 @@ Prettier
|
|||
|
||||
To install Prettier, `install Node.js <https://nodejs.org/en/download/package-manager/>`__ and then run the following in the root of your ``datasette`` repository checkout::
|
||||
|
||||
npm install
|
||||
|
||||
This will install Prettier in a ``node_modules`` directory. You can then check that your code matches the coding style like so::
|
||||
|
||||
npm run prettier -- --check
|
||||
|
||||
::
|
||||
|
||||
> prettier
|
||||
> prettier 'datasette/static/*[!.min].js' "--check"
|
||||
|
||||
|
@ -174,7 +183,7 @@ This will install Prettier in a ``node_modules`` directory. You can then check t
|
|||
|
||||
You can fix any problems by running::
|
||||
|
||||
npm run fix
|
||||
|
||||
.. _contributing_documentation:
|
||||
|
||||
|
@ -245,6 +254,7 @@ Datasette releases are performed using tags. When a new release is published on
|
|||
* Re-point the "latest" tag on Docker Hub to the new image
|
||||
* Build a wheel bundle of the underlying Python source code
|
||||
* Push that new wheel up to PyPI: https://pypi.org/project/datasette/
|
||||
* If the release is an alpha, navigate to https://readthedocs.org/projects/datasette/versions/ and search for the tag name in the "Activate a version" filter, then mark that version as "active" to ensure it will appear on the public ReadTheDocs documentation site.
|
||||
|
||||
To deploy new releases you will need to have push access to the main Datasette GitHub repository.
|
||||
|
||||
|
@ -322,10 +332,17 @@ Upgrading CodeMirror
|
|||
|
||||
Datasette bundles `CodeMirror <https://codemirror.net/>`__ for the SQL editing interface, e.g. on `this page <https://latest.datasette.io/fixtures>`__. Here are the steps for upgrading to a new version of CodeMirror:
|
||||
|
||||
* Install the packages with::

    npm i codemirror @codemirror/lang-sql

* Build the bundle using the version number from ``package.json`` with::

    node_modules/.bin/rollup datasette/static/cm-editor-6.0.1.js \
      -f iife \
      -n cm \
      -o datasette/static/cm-editor-6.0.1.bundle.js \
      -p @rollup/plugin-node-resolve \
      -p @rollup/plugin-terser

* Update the version reference in the ``codemirror.html`` template.
|
|
@ -5,167 +5,6 @@ Custom pages and templates
|
|||
|
||||
Datasette provides a number of ways of customizing the way data is displayed.
|
||||
|
||||
|
||||
|
||||
CSS classes on the <body>
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
@ -259,37 +98,37 @@ Consider the following directory structure::
|
|||
You can start Datasette using ``--static assets:static-files/`` to serve those
|
||||
files from the ``/assets/`` mount point::
|
||||
|
||||
datasette --config datasette.yaml --static assets:static-files/ --memory
|
||||
|
||||
The following URLs will now serve the content from those CSS and JS files::
|
||||
|
||||
http://localhost:8001/assets/styles.css
|
||||
http://localhost:8001/assets/app.js
|
||||
|
||||
You can reference those files from ``datasette.yaml`` like this, see :ref:`custom CSS and JavaScript <configuration_reference_css_js>` for more details:
|
||||
|
||||
.. [[[cog
    from metadata_doc import config_example
    config_example(cog, """
    extra_css_urls:
    - /assets/styles.css
    extra_js_urls:
    - /assets/app.js
    """)
.. ]]]

.. tab:: datasette.yaml

    .. code-block:: yaml


        extra_css_urls:
        - /assets/styles.css
        extra_js_urls:
        - /assets/app.js


.. tab:: datasette.json

    .. code-block:: json
|
||||
|
||||
|
@ -309,7 +148,7 @@ Publishing static assets
|
|||
The :ref:`cli_publish` command can be used to publish your static assets,
|
||||
using the same syntax as above::
|
||||
|
||||
datasette publish cloudrun mydb.db --static assets:static-files/
|
||||
|
||||
This will upload the contents of the ``static-files/`` directory as part of the
|
||||
deployment, and configure Datasette to correctly serve the assets from ``/assets/``.
|
||||
|
@ -442,7 +281,7 @@ You can add templated pages to your Datasette instance by creating HTML files in
|
|||
|
||||
For example, to add a custom page that is served at ``http://localhost/about`` you would create a file in ``templates/pages/about.html``, then start Datasette like this::
|
||||
|
||||
datasette mydb.db --template-dir=templates/
|
||||
|
||||
You can nest directories within pages to create a nested structure. To create a ``http://localhost:8001/about/map`` page you would create ``templates/pages/about/map.html``.
|
||||
|
||||
|
@ -497,7 +336,7 @@ To serve a custom HTTP header, add a ``custom_header(name, value)`` function cal
|
|||
|
||||
You can verify this is working using ``curl`` like this::
|
||||
|
||||
curl -I 'http://127.0.0.1:8001/teapot'
|
||||
HTTP/1.1 418
|
||||
date: Sun, 26 Apr 2020 18:38:30 GMT
|
||||
server: uvicorn
|
||||
|
|
|
@ -56,7 +56,7 @@ Create a file at ``/etc/systemd/system/datasette.service`` with the following co
|
|||
|
||||
Add a random value for the ``DATASETTE_SECRET`` - this will be used to sign Datasette cookies such as the CSRF token cookie. You can generate a suitable value like so::
|
||||
|
||||
python3 -c 'import secrets; print(secrets.token_hex(32))'
|
||||
|
||||
This configuration will run Datasette against all database files contained in the ``/home/ubuntu/datasette-root`` directory. If that directory contains a ``metadata.yml`` (or ``.json``) file or a ``templates/`` or ``plugins/`` sub-directory those will automatically be loaded by Datasette - see :ref:`config_dir` for details.
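Once the unit file is in place, the standard ``systemd`` commands apply:

.. code-block:: bash

    # Pick up the new unit file, start the service, and enable it on boot
    sudo systemctl daemon-reload
    sudo systemctl start datasette.service
    sudo systemctl enable datasette.service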
|
||||
|
||||
|
|
|
@ -0,0 +1,14 @@
|
|||
.. _events:
|
||||
|
||||
Events
|
||||
======
|
||||
|
||||
Datasette includes a mechanism for tracking events that occur while the software is running. This is primarily intended to be used by plugins, which can both trigger events and listen for events.
|
||||
|
||||
The core Datasette application triggers events when certain things happen. This page describes those events.
|
||||
|
||||
Plugins can listen for events using the :ref:`plugin_hook_track_event` plugin hook, which will be called with instances of the following classes - or additional classes :ref:`registered by other plugins <plugin_hook_register_events>`.
|
||||
|
||||
.. automodule:: datasette.events
|
||||
:members:
|
||||
:exclude-members: Event
|
|
@ -120,7 +120,7 @@ Here's an example that turns on faceting by default for the ``qLegalStatus`` col
|
|||
})
|
||||
.. ]]]
|
||||
|
||||
.. tab:: metadata.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
|
@ -132,7 +132,7 @@ Here's an example that turns on faceting by default for the ``qLegalStatus`` col
|
|||
- qLegalStatus
|
||||
|
||||
|
||||
.. tab:: metadata.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
|
@ -164,7 +164,7 @@ You can specify :ref:`array <facet_by_json_array>` or :ref:`date <facet_by_date>
|
|||
})
|
||||
.. ]]]
|
||||
|
||||
.. tab:: metadata.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
|
@ -173,7 +173,7 @@ You can specify :ref:`array <facet_by_json_array>` or :ref:`date <facet_by_date>
|
|||
- date: created
|
||||
|
||||
|
||||
.. tab:: metadata.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
|
@ -206,7 +206,7 @@ You can change the default facet size (the number of results shown for each face
|
|||
})
|
||||
.. ]]]
|
||||
|
||||
.. tab:: metadata.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
|
@ -219,7 +219,7 @@ You can change the default facet size (the number of results shown for each face
|
|||
facet_size: 10
|
||||
|
||||
|
||||
.. tab:: metadata.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
|
@ -260,14 +260,17 @@ Speeding up facets with indexes
|
|||
The performance of facets can be greatly improved by adding indexes on the columns you wish to facet by.
|
||||
Indexes can be added using the ``sqlite3`` command-line utility. Here's how to add an index on the ``state`` column in a table called ``Food_Trucks``::
|
||||
|
||||
sqlite3 mydatabase.db
|
||||
|
||||
::
|
||||
|
||||
SQLite version 3.19.3 2017-06-27 16:48:08
|
||||
Enter ".help" for usage hints.
|
||||
sqlite> CREATE INDEX Food_Trucks_state ON Food_Trucks("state");
|
||||
|
||||
Or using the `sqlite-utils <https://sqlite-utils.datasette.io/en/stable/cli.html#creating-indexes>`__ command-line utility::
|
||||
|
||||
sqlite-utils create-index mydatabase.db Food_Trucks state
|
||||
|
||||
.. _facet_by_json_array:
|
||||
|
||||
|
|
|
@ -81,7 +81,7 @@ Here is an example which enables full-text search (with SQLite advanced search o
|
|||
})
|
||||
.. ]]]
|
||||
|
||||
.. tab:: metadata.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
|
@ -94,7 +94,7 @@ Here is an example which enables full-text search (with SQLite advanced search o
|
|||
searchmode: raw
|
||||
|
||||
|
||||
.. tab:: metadata.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
|
@ -177,14 +177,14 @@ Configuring FTS using sqlite-utils
|
|||
|
||||
Here's how to use ``sqlite-utils`` to enable full-text search for an ``items`` table across the ``name`` and ``description`` columns::
|
||||
|
||||
sqlite-utils enable-fts mydatabase.db items name description
|
||||
|
||||
Configuring FTS using csvs-to-sqlite
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
If your data starts out in CSV files, you can use Datasette's companion tool `csvs-to-sqlite <https://github.com/simonw/csvs-to-sqlite>`__ to convert that file into a SQLite database and enable full-text search on specific columns. For a file called ``items.csv`` where you want full-text search to operate against the ``name`` and ``description`` columns you would run the following::
|
||||
|
||||
csvs-to-sqlite items.csv items.db -f name -f description
|
||||
|
||||
Configuring FTS by hand
|
||||
~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
|
|
@ -17,7 +17,7 @@ datasette| |discord|
|
|||
.. |docker: datasette| image:: https://img.shields.io/badge/docker-datasette-blue
|
||||
:target: https://hub.docker.com/r/datasetteproject/datasette
|
||||
.. |discord| image:: https://img.shields.io/discord/823971286308356157?label=discord
|
||||
:target: https://datasette.io/discord
|
||||
|
||||
*An open source multi-tool for exploring and publishing data*
|
||||
|
||||
|
@ -39,6 +39,7 @@ Contents
|
|||
|
||||
getting_started
|
||||
installation
|
||||
configuration
|
||||
ecosystem
|
||||
cli-reference
|
||||
pages
|
||||
|
@ -59,8 +60,10 @@ Contents
|
|||
custom_templates
|
||||
plugins
|
||||
writing_plugins
|
||||
javascript_plugins
|
||||
plugin_hooks
|
||||
testing_plugins
|
||||
internals
|
||||
events
|
||||
contributing
|
||||
changelog
|
||||
|
|
|
@ -102,11 +102,21 @@ Installing plugins using pipx
|
|||
|
||||
You can install additional datasette plugins with ``pipx inject`` like so::
|
||||
|
||||
pipx inject datasette datasette-json-html
|
||||
|
||||
::
|
||||
|
||||
injected package datasette-json-html into venv datasette
|
||||
done! ✨ 🌟 ✨
|
||||
|
||||
Then to confirm the plugin was installed correctly:
|
||||
|
||||
::
|
||||
|
||||
datasette plugins
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
[
|
||||
{
|
||||
"name": "datasette-json-html",
|
||||
|
@ -121,12 +131,18 @@ Upgrading packages using pipx
|
|||
|
||||
You can upgrade your pipx installation to the latest release of Datasette using ``pipx upgrade datasette``::
|
||||
|
||||
pipx upgrade datasette
|
||||
|
||||
::
|
||||
|
||||
upgraded package datasette from 0.39 to 0.40 (location: /Users/simon/.local/pipx/venvs/datasette)
|
||||
|
||||
To upgrade a plugin within the pipx environment use ``pipx runpip datasette install -U name-of-plugin`` - like this::
|
||||
|
||||
datasette plugins
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
[
|
||||
{
|
||||
"name": "datasette-vega",
|
||||
|
@ -136,7 +152,12 @@ To upgrade a plugin within the pipx environment use ``pipx runpip datasette inst
|
|||
}
|
||||
]
|
||||
|
||||
Now upgrade the plugin::
|
||||
|
||||
pipx runpip datasette install -U datasette-vega
|
||||
|
||||
::
|
||||
|
||||
Collecting datasette-vega
|
||||
Downloading datasette_vega-0.6.2-py3-none-any.whl (1.8 MB)
|
||||
|████████████████████████████████| 1.8 MB 2.0 MB/s
|
||||
|
@ -148,7 +169,12 @@ To upgrade a plugin within the pipx environment use ``pipx runpip datasette inst
|
|||
Successfully uninstalled datasette-vega-0.6
|
||||
Successfully installed datasette-vega-0.6.2
|
||||
|
||||
To confirm the upgrade::
|
||||
|
||||
datasette plugins
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
[
|
||||
{
|
||||
"name": "datasette-vega",
|
||||
|
|
|
@ -210,8 +210,7 @@ To set cookies on the response, use the ``response.set_cookie(...)`` method. The
|
|||
secure=False,
|
||||
httponly=False,
|
||||
samesite="lax",
|
||||
): ...
|
||||
|
||||
You can use this with :ref:`datasette.sign() <datasette_sign>` to set signed cookies. Here's how you would set the :ref:`ds_actor cookie <authentication_ds_actor>` for use with Datasette :ref:`authentication <authentication>`:
|
||||
|
||||
|
@ -271,7 +270,7 @@ Property exposing a ``collections.OrderedDict`` of databases currently connected
|
|||
|
||||
The dictionary keys are the name of the database that is used in the URL - e.g. ``/fixtures`` would have a key of ``"fixtures"``. The values are :ref:`internals_database` instances.
|
||||
|
||||
All databases are listed, irrespective of user permissions.
|
||||
|
||||
.. _datasette_permissions:
|
||||
|
||||
|
@ -280,7 +279,7 @@ All databases are listed, irrespective of user permissions. This means that the
|
|||
|
||||
Property exposing a dictionary of permissions that have been registered using the :ref:`plugin_register_permissions` plugin hook.
|
||||
|
||||
The dictionary keys are the permission names - e.g. ``view-instance`` - and the values are ``Permission()`` objects describing the permission. Here is a :ref:`description of that object <plugin_register_permissions>`.
|
||||
|
||||
.. _datasette_plugin_config:
|
||||
|
||||
|
@ -296,7 +295,7 @@ The dictionary keys are the permission names - e.g. ``view-instance`` - and the
|
|||
``table`` - None or string
|
||||
The table the user is interacting with.
|
||||
|
||||
This method lets you read plugin configuration values that were set in ``datasette.yaml``. See :ref:`writing_plugins_configuration` for full details of how this method should be used.
|
||||
|
||||
The return value will be the value from the configuration file - usually a dictionary.
|
||||
|
||||
|
@ -322,6 +321,27 @@ await .render_template(template, context=None, request=None)
|
|||
|
||||
Renders a `Jinja template <https://jinja.palletsprojects.com/en/2.11.x/>`__ using Datasette's preconfigured instance of Jinja and returns the resulting string. The template will have access to Datasette's default template functions and any functions that have been made available by other plugins.
|
||||
|
||||
.. _datasette_actors_from_ids:
|
||||
|
||||
await .actors_from_ids(actor_ids)
|
||||
---------------------------------
|
||||
|
||||
``actor_ids`` - list of strings or integers
|
||||
A list of actor IDs to look up.
|
||||
|
||||
Returns a dictionary, where the keys are the IDs passed to it and the values are the corresponding actor dictionaries.
|
||||
|
||||
This method is mainly designed to be used with plugins. See the :ref:`plugin_hook_actors_from_ids` documentation for details.
|
||||
|
||||
If no plugins that implement that hook are installed, the default return value looks like this:
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"1": {"id": "1"},
|
||||
"2": {"id": "2"}
|
||||
}
|
||||
|
||||
.. _datasette_permission_allowed:
|
||||
|
||||
await .permission_allowed(actor, action, resource=None, default=...)
|
||||
|
@ -343,7 +363,7 @@ await .permission_allowed(actor, action, resource=None, default=...)
|
|||
|
||||
Check if the given actor has :ref:`permission <authentication_permissions>` to perform the given action on the given resource.
|
||||
|
||||
Some permission checks are carried out against :ref:`rules defined in datasette.yaml <authentication_permissions_config>`, while other custom permissions may be decided by plugins that implement the :ref:`plugin_hook_permission_allowed` plugin hook.
|
||||
|
||||
If neither ``metadata.json`` nor any of the plugins provide an answer to the permission query the ``default`` argument will be returned.
|
||||
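For example, to check whether the current actor can view a hypothetical ``facetable`` table in the ``fixtures`` database - a sketch assuming a ``request`` object is available, and that table-level checks take a ``(database, table)`` tuple as the resource:

.. code-block:: python

    visible = await datasette.permission_allowed(
        request.actor,
        "view-table",
        resource=("fixtures", "facetable"),
        default=False,
    )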
|
||||
|
@ -469,6 +489,16 @@ The following example creates a token that can access ``view-instance`` and ``vi
|
|||
},
|
||||
)
|
||||
|
||||
.. _datasette_get_permission:
|
||||
|
||||
.get_permission(name_or_abbr)
|
||||
-----------------------------
|
||||
|
||||
``name_or_abbr`` - string
|
||||
The name or abbreviation of the permission to look up, e.g. ``view-table`` or ``vt``.
|
||||
|
||||
Returns a :ref:`Permission object <plugin_register_permissions>` representing the permission, or raises a ``KeyError`` if one is not found.
|
||||
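A minimal sketch:

.. code-block:: python

    try:
        permission = datasette.get_permission("vt")  # abbreviation for view-table
    except KeyError:
        permission = None  # nothing registered under that name or abbreviation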
|
||||
.. _datasette_get_database:
|
||||
|
||||
.get_database(name)
|
||||
|
@ -479,6 +509,13 @@ The following example creates a token that can access ``view-instance`` and ``vi
|
|||
|
||||
Returns the specified database object. Raises a ``KeyError`` if the database does not exist. Call this method without an argument to return the first connected database.
|
||||
|
||||
.. _get_internal_database:
|
||||
|
||||
.get_internal_database()
|
||||
------------------------
|
||||
|
||||
Returns a database object for reading and writing to the private :ref:`internal database <internals_internal>`.
|
||||
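A sketch showing all three lookups together - the ``fixtures`` database name is illustrative:

.. code-block:: python

    db = datasette.get_database("fixtures")  # raises KeyError if missing
    first_db = datasette.get_database()  # first connected database
    internal_db = datasette.get_internal_database()  # private internal database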
|
||||
.. _datasette_add_database:
|
||||
|
||||
.add_database(db, name=None, route=None)
|
||||
|
@ -556,6 +593,26 @@ Using either of these pattern will result in the in-memory database being served
|
|||
|
||||
This removes a database that has been previously added. ``name=`` is the unique name of that database.
|
||||
|
||||
.. _datasette_track_event:
|
||||
|
||||
await .track_event(event)
|
||||
-------------------------
|
||||
|
||||
``event`` - ``Event``
|
||||
An instance of a subclass of ``datasette.events.Event``.
|
||||
|
||||
Plugins can call this to track events, using classes they have previously registered. See :ref:`plugin_event_tracking` for details.
|
||||
|
||||
The event will then be passed to all plugins that have registered to receive events using the :ref:`plugin_hook_track_event` hook.
|
||||
|
||||
Example usage, assuming the plugin has previously registered the ``BanUserEvent`` class:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
await datasette.track_event(
|
||||
BanUserEvent(user={"id": 1, "username": "cleverbot"})
|
||||
)
|
||||
|
||||
.. _datasette_sign:
|
||||
|
||||
.sign(value, namespace="default")
|
||||
|
@ -953,7 +1010,9 @@ You can pass additional SQL parameters as a tuple or dictionary.
|
|||
|
||||
The method will block until the operation is completed, and the return value will be the return from calling ``conn.execute(...)`` using the underlying ``sqlite3`` Python library.
|
||||
|
||||
If you pass ``block=False`` this behaviour changes to "fire and forget" - queries will be added to the write queue and executed in a separate thread while your code can continue to do other things. The method will return a UUID representing the queued task.
|
||||
If you pass ``block=False`` this behavior changes to "fire and forget" - queries will be added to the write queue and executed in a separate thread while your code can continue to do other things. The method will return a UUID representing the queued task.
|
||||
|
||||
Each call to ``execute_write()`` will be executed inside a transaction.
|
||||
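A sketch illustrating both modes, assuming a hypothetical ``documents`` table:

.. code-block:: python

    # Blocks until the write has completed
    await db.execute_write(
        "insert into documents (title) values (?)", ["My title"]
    )

    # Fire-and-forget: returns a UUID for the queued task instead
    task_id = await db.execute_write(
        "insert into documents (title) values (?)", ["Another title"], block=False
    )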
|
||||
.. _database_execute_write_script:
|
||||
|
||||
|
@ -962,6 +1021,8 @@ await db.execute_write_script(sql, block=True)
|
|||
|
||||
Like ``execute_write()`` but can be used to send multiple SQL statements in a single string separated by semicolons, using the ``sqlite3`` `conn.executescript() <https://docs.python.org/3/library/sqlite3.html#sqlite3.Cursor.executescript>`__ method.
|
||||
|
||||
Each call to ``execute_write_script()`` will be executed inside a transaction.
|
||||
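For example, to create a table and an index in a single call - a sketch using hypothetical table and index names:

.. code-block:: python

    await db.execute_write_script(
        """
        create table if not exists documents (id integer primary key, title text);
        create index if not exists idx_documents_title on documents (title);
        """
    )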
|
||||
.. _database_execute_write_many:
|
||||
|
||||
await db.execute_write_many(sql, params_seq, block=True)
|
||||
|
@ -976,10 +1037,12 @@ Like ``execute_write()`` but uses the ``sqlite3`` `conn.executemany() <https://d
|
|||
[(1, "Melanie"), (2, "Selma"), (2, "Viktor")],
|
||||
)
|
||||
|
||||
Each call to ``execute_write_many()`` will be executed inside a transaction.
|
||||
|
||||
.. _database_execute_write_fn:
|
||||
|
||||
await db.execute_write_fn(fn, block=True)
|
||||
------------------------------------------
|
||||
await db.execute_write_fn(fn, block=True, transaction=True)
|
||||
-----------------------------------------------------------
|
||||
|
||||
This method works like ``.execute_write()``, but instead of a SQL statement you give it a callable Python function. Your function will be queued up and then called when the write connection is available, passing that connection as the argument to the function.
|
||||
|
||||
|
@ -1011,8 +1074,27 @@ The value returned from ``await database.execute_write_fn(...)`` will be the ret
|
|||
|
||||
If your function raises an exception that exception will be propagated up to the ``await`` line.
|
||||
|
||||
By default your function will be executed inside a transaction. You can pass ``transaction=False`` to disable this behavior, though if you do that you should be careful to manually apply transactions - ideally using the ``with conn:`` pattern, or you may see ``OperationalError: database table is locked`` errors.
|
||||
|
||||
If you specify ``block=False`` the method becomes fire-and-forget, queueing your function to be executed and then allowing your code after the call to ``.execute_write_fn()`` to continue running while the underlying thread waits for an opportunity to run your function. A UUID representing the queued task will be returned. Any exceptions in your code will be silently swallowed.
|
||||
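A sketch of using ``transaction=False`` with a manually managed transaction, against a hypothetical ``documents`` table:

.. code-block:: python

    def write_rows(conn):
        # transaction=False means we manage the transaction ourselves,
        # here using the "with conn:" pattern to commit on success
        with conn:
            conn.execute(
                "insert into documents (title) values (?)", ["My title"]
            )


    await db.execute_write_fn(write_rows, transaction=False)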
|
||||
.. _database_execute_isolated_fn:
|
||||
|
||||
await db.execute_isolated_fn(fn)
|
||||
--------------------------------
|
||||
|
||||
This method is similar to :ref:`execute_write_fn() <database_execute_write_fn>` but executes the provided function in an entirely isolated SQLite connection, which is opened, used and then closed again in a single call to this method.
|
||||
|
||||
The :ref:`prepare_connection() <plugin_hook_prepare_connection>` plugin hook is not executed against this connection.
|
||||
|
||||
This allows plugins to execute database operations that might conflict with how database connections are usually configured. For example, running a ``VACUUM`` operation while bypassing any restrictions placed by the `datasette-sqlite-authorizer <https://github.com/datasette/datasette-sqlite-authorizer>`__ plugin.
|
||||
|
||||
Plugins can also use this method to load potentially dangerous SQLite extensions, use them to perform an operation and then have them safely unloaded at the end of the call, without risk of exposing them to other connections.
|
||||
|
||||
Functions run using ``execute_isolated_fn()`` share the same queue as ``execute_write_fn()``, which guarantees that no writes can be executed at the same time as the isolated function is executing.
|
||||
|
||||
The return value of the function will be returned by this method. Any exceptions raised by the function will be raised out of the ``await`` line as well.
|
||||
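For example, running ``VACUUM`` against an isolated connection - a minimal sketch:

.. code-block:: python

    def vacuum(conn):
        conn.execute("vacuum")


    await db.execute_isolated_fn(vacuum)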
|
||||
.. _database_close:
|
||||
|
||||
db.close()
|
||||
|
@ -1127,19 +1209,23 @@ You can selectively disable CSRF protection using the :ref:`plugin_hook_skip_csr
|
|||
|
||||
.. _internals_internal:
|
||||
|
||||
The _internal database
|
||||
======================
|
||||
Datasette's internal database
|
||||
=============================
|
||||
|
||||
.. warning::
|
||||
This API should be considered unstable - the structure of these tables may change prior to the release of Datasette 1.0.
|
||||
Datasette maintains an "internal" SQLite database used for configuration, caching, and storage. Plugins can store configuration, settings, and other data inside this database. By default, Datasette will use a temporary in-memory SQLite database as the internal database, which is created at startup and destroyed at shutdown. Users of Datasette can optionally pass in a ``--internal`` flag to specify the path to a SQLite database to use as the internal database, which will persist internal data across Datasette instances.
|
||||
|
||||
Datasette maintains an in-memory SQLite database with details of the databases, tables and columns for all of the attached databases.
|
||||
Datasette maintains tables called ``catalog_databases``, ``catalog_tables``, ``catalog_columns``, ``catalog_indexes`` and ``catalog_foreign_keys`` with details of the attached databases and their schemas. These tables should not be considered a stable API - they may change between Datasette releases.
|
||||
|
||||
By default all actors are denied access to the ``view-database`` permission for the ``_internal`` database, so the database is not visible to anyone unless they :ref:`sign in as root <authentication_root>`.
|
||||
The internal database is not exposed in the Datasette application by default, which means private data can safely be stored without worry of accidentally leaking information through the default Datasette interface and API. However, other plugins do have full read and write access to the internal database.
|
||||
|
||||
Plugins can access this database by calling ``db = datasette.get_database("_internal")`` and then executing queries using the :ref:`Database API <internals_database>`.
|
||||
Plugins can access this database by calling ``internal_db = datasette.get_internal_database()`` and then executing queries using the :ref:`Database API <internals_database>`.
|
||||
|
||||
You can explore an example of this database by `signing in as root <https://latest.datasette.io/login-as-root>`__ to the ``latest.datasette.io`` demo instance and then navigating to `latest.datasette.io/_internal <https://latest.datasette.io/_internal>`__.
|
||||
Plugin authors are asked to practice good etiquette when using the internal database, as all plugins use the same database to store data. For example:
|
||||
|
||||
1. Use a unique prefix when creating tables, indices, and triggers in the internal database. If your plugin is called ``datasette-xyz``, then prefix names with ``datasette_xyz_*`` - see the sketch after this list.
|
||||
2. Avoid long-running write statements that may stall or block other plugins that are trying to write at the same time.
|
||||
3. Use temporary tables or shared in-memory attached databases when possible.
|
||||
4. Avoid implementing features that could expose private data stored in the internal database by other plugins.
|
||||
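A sketch of that etiquette in practice, for a hypothetical plugin called ``datasette-xyz``:

.. code-block:: python

    internal_db = datasette.get_internal_database()
    # Prefix the table name with datasette_xyz_ to avoid clashes with other plugins
    await internal_db.execute_write(
        """
        create table if not exists datasette_xyz_tags (
            id integer primary key,
            tag text
        )
        """
    )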
|
||||
.. _internals_utils:
|
||||
|
||||
|
@ -1170,6 +1256,15 @@ Utility function for calling ``await`` on a return value if it is awaitable, oth
|
|||
|
||||
.. autofunction:: datasette.utils.await_me_maybe
|
||||
|
||||
.. _internals_utils_derive_named_parameters:
|
||||
|
||||
derive_named_parameters(db, sql)
|
||||
--------------------------------
|
||||
|
||||
Derive the list of named parameters referenced in a SQL query, using an ``explain`` query executed against the provided database.
|
||||
|
||||
.. autofunction:: datasette.utils.derive_named_parameters
|
||||
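A sketch of how this might be used - the SQL query is illustrative:

.. code-block:: python

    from datasette.utils import derive_named_parameters

    params = await derive_named_parameters(
        db, "select * from facetable where state = :state and city_id = :city_id"
    )
    # params is now ["state", "city_id"]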
|
||||
.. _internals_tilde_encoding:
|
||||
|
||||
Tilde encoding
|
||||
|
@ -1272,6 +1367,7 @@ This example uses the :ref:`register_routes() <plugin_register_routes>` plugin h
|
|||
(r"/parallel-queries$", parallel_queries),
|
||||
]
|
||||
|
||||
Note that running parallel SQL queries in this way has `been known to cause problems in the past <https://github.com/simonw/datasette/issues/2189>`__, so treat this example with caution.
|
||||
|
||||
Adding ``?_trace=1`` will show that the trace covers both of those child tasks.
|
||||
|
||||
|
|
|
@ -87,7 +87,7 @@ Shows a list of currently installed plugins and their versions. `Plugins example
|
|||
|
||||
Add ``?all=1`` to include details of the default plugins baked into Datasette.
|
||||
|
||||
.. _JsonDataView_config:
|
||||
.. _JsonDataView_settings:
|
||||
|
||||
/-/settings
|
||||
-----------
|
||||
|
@ -105,6 +105,25 @@ Shows the :ref:`settings` for this instance of Datasette. `Settings example <htt
|
|||
"sql_time_limit_ms": 1000
|
||||
}
|
||||
|
||||
.. _JsonDataView_config:
|
||||
|
||||
/-/config
|
||||
---------
|
||||
|
||||
Shows the :ref:`configuration <configuration>` for this instance of Datasette. This is generally the contents of the :ref:`datasette.yaml or datasette.json <configuration_reference>` file, which can include plugin configuration as well. `Config example <https://latest.datasette.io/-/config>`_:
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"settings": {
|
||||
"template_debug": true,
|
||||
"trace_debug": true,
|
||||
"force_https_urls": true
|
||||
}
|
||||
}
|
||||
|
||||
Any keys that include one of the following substrings in their names will be returned as redacted ``***`` output, to help avoid accidentally leaking private configuration information: ``secret``, ``key``, ``password``, ``token``, ``hash``, ``dsn``.
|
||||
|
||||
.. _JsonDataView_databases:
|
||||
|
||||
/-/databases
|
||||
|
|
|
@ -0,0 +1,159 @@
|
|||
.. _javascript_plugins:
|
||||
|
||||
JavaScript plugins
|
||||
==================
|
||||
|
||||
Datasette can run custom JavaScript in several different ways:
|
||||
|
||||
- Datasette plugins written in Python can use the :ref:`extra_js_urls() <plugin_hook_extra_js_urls>` or :ref:`extra_body_script() <plugin_hook_extra_body_script>` plugin hooks to inject JavaScript into a page
|
||||
- Datasette instances with :ref:`custom templates <customization_custom_templates>` can include additional JavaScript in those templates
|
||||
- The ``extra_js_urls`` key in ``datasette.yaml`` :ref:`can be used to include extra JavaScript <configuration_reference_css_js>`
|
||||
|
||||
There are no limitations on what this JavaScript can do. It is executed directly by the browser, so it can manipulate the DOM, fetch additional data and do anything else that JavaScript is capable of.
|
||||
|
||||
.. warning::
|
||||
Custom JavaScript has security implications, especially for authenticated Datasette instances where the JavaScript might run in the context of the authenticated user. It's important to carefully review any JavaScript you run in your Datasette instance.
|
||||
|
||||
.. _javascript_datasette_init:
|
||||
|
||||
The datasette_init event
|
||||
------------------------
|
||||
|
||||
Datasette emits a custom event called ``datasette_init`` when the page is loaded. This event is dispatched on the ``document`` object, and includes a ``detail`` object with a reference to the :ref:`datasetteManager <javascript_datasette_manager>` object.
|
||||
|
||||
Your JavaScript code can listen out for this event using ``document.addEventListener()`` like this:
|
||||
|
||||
.. code-block:: javascript
|
||||
|
||||
document.addEventListener("datasette_init", function (evt) {
|
||||
const manager = evt.detail;
|
||||
console.log("Datasette version:", manager.VERSION);
|
||||
});
|
||||
|
||||
.. _javascript_datasette_manager:
|
||||
|
||||
datasetteManager
|
||||
----------------
|
||||
|
||||
The ``datasetteManager`` object exposes the following properties and methods:
|
||||
|
||||
``VERSION`` - string
|
||||
The version of Datasette
|
||||
|
||||
``plugins`` - ``Map()``
|
||||
A Map of currently loaded plugin names to plugin implementations
|
||||
|
||||
``registerPlugin(name, implementation)``
|
||||
Call this to register a plugin, passing its name and implementation
|
||||
|
||||
``selectors`` - object
|
||||
An object providing named aliases to useful CSS selectors, :ref:`listed below <javascript_datasette_manager_selectors>`
|
||||
|
||||
.. _javascript_plugin_objects:
|
||||
|
||||
JavaScript plugin objects
|
||||
-------------------------
|
||||
|
||||
JavaScript plugins are blocks of code that can be registered with Datasette using the ``registerPlugin()`` method on the :ref:`datasetteManager <javascript_datasette_manager>` object.
|
||||
|
||||
The ``implementation`` object passed to this method should include a ``version`` key defining the plugin version, and one or more of the following named functions providing the implementation of the plugin:
|
||||
|
||||
.. _javascript_plugins_makeAboveTablePanelConfigs:
|
||||
|
||||
makeAboveTablePanelConfigs()
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
This method should return a JavaScript array of objects defining additional panels to be added to the top of the table page. Each object should have the following:
|
||||
|
||||
``id`` - string
|
||||
A unique string ID for the panel, for example ``map-panel``
|
||||
``label`` - string
|
||||
A human-readable label for the panel
|
||||
``render(node)`` - function
|
||||
A function that will be called with a DOM node to render the panel into
|
||||
|
||||
This example shows how a plugin might define a single panel:
|
||||
|
||||
.. code-block:: javascript
|
||||
|
||||
document.addEventListener('datasette_init', function(ev) {
|
||||
ev.detail.registerPlugin('panel-plugin', {
|
||||
version: 0.1,
|
||||
makeAboveTablePanelConfigs: () => {
|
||||
return [
|
||||
{
|
||||
id: 'first-panel',
|
||||
label: 'First panel',
|
||||
render: node => {
|
||||
node.innerHTML = '<h2>My custom panel</h2><p>This is a custom panel that I added using a JavaScript plugin</p>';
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
When a page with a table loads, all registered plugins that implement ``makeAboveTablePanelConfigs()`` will be called and panels they return will be added to the top of the table page.
|
||||
|
||||
.. _javascript_plugins_makeColumnActions:
|
||||
|
||||
makeColumnActions(columnDetails)
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
This method, if present, will be called when Datasette is rendering the cog action menu icons that appear at the top of the table view. By default these include options like "Sort ascending/descending" and "Facet by this", but plugins can return additional actions to be included in this menu.
|
||||
|
||||
The method will be called with a ``columnDetails`` object with the following keys:
|
||||
|
||||
``columnName`` - string
|
||||
The name of the column
|
||||
``columnNotNull`` - boolean
|
||||
True if the column is defined as NOT NULL
|
||||
``columnType`` - string
|
||||
The SQLite data type of the column
|
||||
``isPk`` - boolean
|
||||
True if the column is part of the primary key
|
||||
|
||||
It should return a JavaScript array of objects each with a ``label`` and ``onClick`` property:
|
||||
|
||||
``label`` - string
|
||||
The human-readable label for the action
|
||||
``onClick(evt)`` - function
|
||||
A function that will be called when the action is clicked
|
||||
|
||||
The ``evt`` object passed to the ``onClick`` is the standard browser event object that triggered the click.
|
||||
|
||||
This example plugin adds two menu items - one to copy the column name to the clipboard and another that displays the column metadata in an ``alert()`` window:
|
||||
|
||||
.. code-block:: javascript
|
||||
|
||||
document.addEventListener('datasette_init', function(ev) {
|
||||
ev.detail.registerPlugin('column-name-plugin', {
|
||||
version: 0.1,
|
||||
makeColumnActions: (columnDetails) => {
|
||||
return [
|
||||
{
|
||||
label: 'Copy column to clipboard',
|
||||
onClick: async (evt) => {
|
||||
await navigator.clipboard.writeText(columnDetails.columnName)
|
||||
}
|
||||
},
|
||||
{
|
||||
label: 'Alert column metadata',
|
||||
onClick: () => alert(JSON.stringify(columnDetails, null, 2))
|
||||
}
|
||||
];
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
.. _javascript_datasette_manager_selectors:
|
||||
|
||||
Selectors
|
||||
---------
|
||||
|
||||
These are available on the ``selectors`` property of the :ref:`javascript_datasette_manager` object.
|
||||
|
||||
.. literalinclude:: ../datasette/static/datasette-manager.js
|
||||
:language: javascript
|
||||
:start-at: const DOM_SELECTORS = {
|
||||
:end-at: };
|
|
@ -237,6 +237,9 @@ You can filter the data returned by the table based on column values using a que
|
|||
``?column__contains=value``
|
||||
Rows where the string column contains the specified value (``column like "%value%"`` in SQL).
|
||||
|
||||
``?column__notcontains=value``
|
||||
Rows where the string column does not contain the specified value (``column not like "%value%"`` in SQL).
|
||||
|
||||
``?column__endswith=value``
|
||||
Rows where the string column ends with the specified value (``column like "%value"`` in SQL).
|
||||
|
||||
|
@ -616,7 +619,9 @@ Pass ``"ignore": true`` to ignore these errors and insert the other rows:
|
|||
"ignore": true
|
||||
}
|
||||
|
||||
Or you can pass ``"replace": true`` to replace any rows with conflicting primary keys with the new values.
|
||||
Or you can pass ``"replace": true`` to replace any rows with conflicting primary keys with the new values. This requires the :ref:`permissions_update_row` permission.
|
||||
|
||||
Pass ``"alter: true`` to automatically add any missing columns to the table. This requires the :ref:`permissions_alter_table` permission.
|
||||
|
||||
.. _TableUpsertView:
|
||||
|
||||
|
@ -728,6 +733,8 @@ When using upsert you must provide the primary key column (or columns if the tab
|
|||
|
||||
If your table does not have an explicit primary key you should pass the SQLite ``rowid`` key instead.
|
||||
|
||||
Pass ``"alter: true`` to automatically add any missing columns to the table. This requires the :ref:`permissions_alter_table` permission.
|
||||
|
||||
.. _RowUpdateView:
|
||||
|
||||
Updating a row
|
||||
|
@ -783,6 +790,8 @@ The returned JSON will look like this:
|
|||
|
||||
Any errors will return ``{"errors": ["... descriptive message ..."], "ok": false}``, and a ``400`` status code for a bad input or a ``403`` status code for an authentication or permission error.
|
||||
|
||||
Pass ``"alter: true`` to automatically add any missing columns to the table. This requires the :ref:`permissions_alter_table` permission.
|
||||
|
||||
.. _RowDeleteView:
|
||||
|
||||
Deleting a row
|
||||
|
@ -834,19 +843,22 @@ To create a table, make a ``POST`` to ``/<database>/-/create``. This requires th
|
|||
|
||||
The JSON here describes the table that will be created:
|
||||
|
||||
* ``table`` is the name of the table to create. This field is required.
|
||||
* ``columns`` is a list of columns to create. Each column is a dictionary with ``name`` and ``type`` keys.
|
||||
* ``table`` is the name of the table to create. This field is required.
|
||||
* ``columns`` is a list of columns to create. Each column is a dictionary with ``name`` and ``type`` keys.
|
||||
|
||||
- ``name`` is the name of the column. This is required.
|
||||
- ``type`` is the type of the column. This is optional - if not provided, ``text`` will be assumed. The valid types are ``text``, ``integer``, ``float`` and ``blob``.
|
||||
- ``name`` is the name of the column. This is required.
|
||||
- ``type`` is the type of the column. This is optional - if not provided, ``text`` will be assumed. The valid types are ``text``, ``integer``, ``float`` and ``blob``.
|
||||
|
||||
* ``pk`` is the primary key for the table. This is optional - if not provided, Datasette will create a SQLite table with a hidden ``rowid`` column.
|
||||
* ``pk`` is the primary key for the table. This is optional - if not provided, Datasette will create a SQLite table with a hidden ``rowid`` column.
|
||||
|
||||
If the primary key is an integer column, it will be configured to automatically increment for each new record.
|
||||
If the primary key is an integer column, it will be configured to automatically increment for each new record.
|
||||
|
||||
If you set this to ``id`` without including an ``id`` column in the list of ``columns``, Datasette will create an integer ID column for you.
|
||||
If you set this to ``id`` without including an ``id`` column in the list of ``columns``, Datasette will create an auto-incrementing integer ID column for you.
|
||||
|
||||
* ``pks`` can be used instead of ``pk`` to create a compound primary key. It should be a JSON list of column names to use in that primary key.
|
||||
* ``pks`` can be used instead of ``pk`` to create a compound primary key. It should be a JSON list of column names to use in that primary key.
|
||||
* ``ignore`` can be set to ``true`` to ignore existing rows by primary key if the table already exists.
|
||||
* ``replace`` can be set to ``true`` to replace existing rows by primary key if the table already exists. This requires the :ref:`permissions_update_row` permission.
|
||||
* ``alter`` can be set to ``true`` if you want to automatically add any missing columns to the table. This requires the :ref:`permissions_alter_table` permission.
|
||||
|
||||
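Here is a sketch of creating such a table from Python using only the standard library - the URL, database name, table definition and token are all illustrative:

.. code-block:: python

    import json
    import urllib.request

    body = {
        "table": "attractions",
        "columns": [
            {"name": "id", "type": "integer"},
            {"name": "name", "type": "text"},
        ],
        "pk": "id",
    }
    request = urllib.request.Request(
        "http://localhost:8001/data/-/create",  # hypothetical instance and database
        data=json.dumps(body).encode("utf-8"),
        headers={
            "Authorization": "Bearer xxx",  # replace with a real API token
            "Content-Type": "application/json",
        },
        method="POST",
    )
    response = urllib.request.urlopen(request)
    print(response.status)  # 201 on success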
If the table is successfully created this will return a ``201`` status code and the following response:
|
||||
|
||||
|
@ -925,6 +937,8 @@ You can avoid this error by passing the same ``"ignore": true`` or ``"replace":
|
|||
|
||||
To use the ``"replace": true`` option you will also need the :ref:`permissions_update_row` permission.
|
||||
|
||||
Pass ``"alter": true`` to automatically add any missing columns to the existing table that are present in the rows you are submitting. This requires the :ref:`permissions_alter_table` permission.
|
||||
|
||||
.. _TableDropView:
|
||||
|
||||
Dropping tables
|
||||
|
|
|
@ -26,7 +26,7 @@ Your ``metadata.yaml`` file can look something like this:
|
|||
})
|
||||
.. ]]]
|
||||
|
||||
.. tab:: YAML
|
||||
.. tab:: metadata.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
|
@ -38,7 +38,7 @@ Your ``metadata.yaml`` file can look something like this:
|
|||
source_url: http://example.com/
|
||||
|
||||
|
||||
.. tab:: JSON
|
||||
.. tab:: metadata.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
|
@ -53,7 +53,7 @@ Your ``metadata.yaml`` file can look something like this:
|
|||
.. [[[end]]]
|
||||
|
||||
|
||||
Choosing YAML over JSON adds support for multi-line strings and comments, see :ref:`metadata_yaml`.
|
||||
Choosing YAML over JSON adds support for multi-line strings and comments.
|
||||
|
||||
The above metadata will be displayed on the index page of your Datasette-powered
|
||||
site. The source and license information will also be included in the footer of
|
||||
|
@ -90,7 +90,7 @@ You can also provide metadata at the per-database or per-table level, like this:
|
|||
})
|
||||
.. ]]]
|
||||
|
||||
.. tab:: YAML
|
||||
.. tab:: metadata.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
|
@ -105,7 +105,7 @@ You can also provide metadata at the per-database or per-table level, like this:
|
|||
license_url: https://creativecommons.org/licenses/by/3.0/us/
|
||||
|
||||
|
||||
.. tab:: JSON
|
||||
.. tab:: metadata.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
|
@ -168,7 +168,7 @@ You can include descriptions for your columns by adding a ``"columns": {"name-of
|
|||
})
|
||||
.. ]]]
|
||||
|
||||
.. tab:: YAML
|
||||
.. tab:: metadata.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
|
@ -181,7 +181,7 @@ You can include descriptions for your columns by adding a ``"columns": {"name-of
|
|||
column2: Description of column 2
|
||||
|
||||
|
||||
.. tab:: JSON
|
||||
.. tab:: metadata.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
|
@ -230,7 +230,7 @@ Column units are configured in the metadata like so:
|
|||
})
|
||||
.. ]]]
|
||||
|
||||
.. tab:: YAML
|
||||
.. tab:: metadata.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
|
@ -243,7 +243,7 @@ Column units are configured in the metadata like so:
|
|||
column2: Hz
|
||||
|
||||
|
||||
.. tab:: JSON
|
||||
.. tab:: metadata.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
|
@ -276,7 +276,7 @@ registered with Pint:
|
|||
})
|
||||
.. ]]]
|
||||
|
||||
.. tab:: YAML
|
||||
.. tab:: metadata.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
|
@ -284,7 +284,7 @@ registered with Pint:
|
|||
- decibel = [] = dB
|
||||
|
||||
|
||||
.. tab:: JSON
|
||||
.. tab:: metadata.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
|
@ -320,7 +320,7 @@ By default Datasette tables are sorted by primary key. You can over-ride this de
|
|||
})
|
||||
.. ]]]
|
||||
|
||||
.. tab:: YAML
|
||||
.. tab:: metadata.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
|
@ -331,7 +331,7 @@ By default Datasette tables are sorted by primary key. You can over-ride this de
|
|||
sort: created
|
||||
|
||||
|
||||
.. tab:: JSON
|
||||
.. tab:: metadata.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
|
@ -364,7 +364,7 @@ Or use ``"sort_desc"`` to sort in descending order:
|
|||
})
|
||||
.. ]]]
|
||||
|
||||
.. tab:: YAML
|
||||
.. tab:: metadata.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
|
@ -375,7 +375,7 @@ Or use ``"sort_desc"`` to sort in descending order:
|
|||
sort_desc: created
|
||||
|
||||
|
||||
.. tab:: JSON
|
||||
.. tab:: metadata.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
|
@ -413,7 +413,7 @@ Datasette defaults to displaying 100 rows per page, for both tables and views. Y
|
|||
})
|
||||
.. ]]]
|
||||
|
||||
.. tab:: YAML
|
||||
.. tab:: metadata.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
|
@ -424,7 +424,7 @@ Datasette defaults to displaying 100 rows per page, for both tables and views. Y
|
|||
size: 10
|
||||
|
||||
|
||||
.. tab:: JSON
|
||||
.. tab:: metadata.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
|
@ -469,7 +469,7 @@ control which columns are available for sorting you can do so using the optional
|
|||
})
|
||||
.. ]]]
|
||||
|
||||
.. tab:: YAML
|
||||
.. tab:: metadata.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
|
@ -482,7 +482,7 @@ control which columns are available for sorting you can do so using the optional
|
|||
- weight
|
||||
|
||||
|
||||
.. tab:: JSON
|
||||
.. tab:: metadata.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
|
@ -526,7 +526,7 @@ You can use ``sortable_columns`` to enable specific sort orders for a view calle
|
|||
})
|
||||
.. ]]]
|
||||
|
||||
.. tab:: YAML
|
||||
.. tab:: metadata.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
|
@ -539,7 +539,7 @@ You can use ``sortable_columns`` to enable specific sort orders for a view calle
|
|||
- impressions
|
||||
|
||||
|
||||
.. tab:: JSON
|
||||
.. tab:: metadata.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
|
@ -586,7 +586,7 @@ used for the link label with the ``label_column`` property:
|
|||
})
|
||||
.. ]]]
|
||||
|
||||
.. tab:: YAML
|
||||
.. tab:: metadata.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
|
@ -597,7 +597,7 @@ used for the link label with the ``label_column`` property:
|
|||
label_column: title
|
||||
|
||||
|
||||
.. tab:: JSON
|
||||
.. tab:: metadata.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
|
@ -636,7 +636,7 @@ SpatiaLite tables are automatically hidden) using ``"hidden": true``:
|
|||
})
|
||||
.. ]]]
|
||||
|
||||
.. tab:: YAML
|
||||
.. tab:: metadata.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
|
@ -647,7 +647,7 @@ SpatiaLite tables are automatically hidden) using ``"hidden": true``:
|
|||
hidden: true
|
||||
|
||||
|
||||
.. tab:: JSON
|
||||
.. tab:: metadata.json
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
|
@ -664,38 +664,64 @@ SpatiaLite tables are automatically hidden) using ``"hidden": true``:
|
|||
}
|
||||
.. [[[end]]]
|
||||
|
||||
.. _metadata_yaml:
|
||||
.. _metadata_reference:
|
||||
|
||||
Using YAML for metadata
|
||||
-----------------------
|
||||
Metadata reference
|
||||
------------------
|
||||
|
||||
Datasette accepts YAML as an alternative to JSON for your metadata configuration file.
|
||||
YAML is particularly useful for including multiline HTML and SQL strings, plus inline comments.
|
||||
|
||||
Here's an example of a ``metadata.yml`` file, re-using an example from :ref:`canned_queries`.
|
||||
A full reference of every supported option in a ``metadata.json`` or ``metadata.yaml`` file.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
title: Demonstrating Metadata from YAML
|
||||
description_html: |-
|
||||
<p>This description includes a long HTML string</p>
|
||||
<ul>
|
||||
<li>YAML is better for embedding HTML strings than JSON!</li>
|
||||
</ul>
|
||||
license: ODbL
|
||||
license_url: https://opendatacommons.org/licenses/odbl/
|
||||
databases:
|
||||
fixtures:
|
||||
tables:
|
||||
no_primary_key:
|
||||
hidden: true
|
||||
queries:
|
||||
# This query provides LIKE-based search
|
||||
neighborhood_search:
|
||||
sql: |-
|
||||
select neighborhood, facet_cities.name, state
|
||||
from facetable join facet_cities on facetable.city_id = facet_cities.id
|
||||
where neighborhood like '%' || :text || '%' order by neighborhood;
|
||||
title: Search neighborhoods
|
||||
description_html: |-
|
||||
<p>This demonstrates <em>basic</em> LIKE search
|
||||
Top-level metadata
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
"Top-level" metadata refers to fields that can be specified at the root level of a metadata file. These attributes are meant to describe the entire Datasette instance.
|
||||
|
||||
The following is the full list of allowed top-level metadata fields:
|
||||
|
||||
- ``title``
|
||||
- ``description``
|
||||
- ``description_html``
|
||||
- ``license``
|
||||
- ``license_url``
|
||||
- ``source``
|
||||
- ``source_url``
|
||||
|
||||
Database-level metadata
|
||||
~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
"Database-level" metadata refers to fields that can be specified for each database in a Datasette instance. These attributes should be listed under a database inside the `"databases"` field.
|
||||
|
||||
The following is the full list of allowed database-level metadata fields:
|
||||
|
||||
- ``source``
|
||||
- ``source_url``
|
||||
- ``license``
|
||||
- ``license_url``
|
||||
- ``about``
|
||||
- ``about_url``
|
||||
|
||||
Table-level metadata
|
||||
~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
"Table-level" metadata refers to fields that can be specified for each table in a Datasette instance. These attributes should be listed under a specific table using the `"tables"` field.
|
||||
|
||||
The following is the full list of allowed table-level metadata fields:
|
||||
|
||||
- ``source``
|
||||
- ``source_url``
|
||||
- ``license``
|
||||
- ``license_url``
|
||||
- ``about``
|
||||
- ``about_url``
|
||||
- ``hidden``
|
||||
- ``sort/sort_desc``
|
||||
- ``size``
|
||||
- ``sortable_columns``
|
||||
- ``label_column``
|
||||
- ``facets``
|
||||
- ``fts_table``
|
||||
- ``fts_pk``
|
||||
- ``searchmode``
|
||||
- ``columns``
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import json
|
||||
import textwrap
|
||||
from yaml import safe_dump
|
||||
from ruamel.yaml import round_trip_load
|
||||
from ruamel.yaml import YAML
|
||||
|
||||
|
||||
def metadata_example(cog, data=None, yaml=None):
|
||||
|
@ -11,15 +11,32 @@ def metadata_example(cog, data=None, yaml=None):
|
|||
if yaml:
|
||||
# dedent it first
|
||||
yaml = textwrap.dedent(yaml).strip()
|
||||
# round_trip_load to preserve key order:
|
||||
data = round_trip_load(yaml)
|
||||
data = YAML().load(yaml)
|
||||
output_yaml = yaml
|
||||
else:
|
||||
output_yaml = safe_dump(data, sort_keys=False)
|
||||
cog.out("\n.. tab:: YAML\n\n")
|
||||
cog.out("\n.. tab:: metadata.yaml\n\n")
|
||||
cog.out(" .. code-block:: yaml\n\n")
|
||||
cog.out(textwrap.indent(output_yaml, " "))
|
||||
cog.out("\n\n.. tab:: JSON\n\n")
|
||||
cog.out("\n\n.. tab:: metadata.json\n\n")
|
||||
cog.out(" .. code-block:: json\n\n")
|
||||
cog.out(textwrap.indent(json.dumps(data, indent=2), " "))
|
||||
cog.out("\n")
|
||||
|
||||
|
||||
def config_example(
|
||||
cog, input, yaml_title="datasette.yaml", json_title="datasette.json"
|
||||
):
|
||||
if type(input) is str:
|
||||
data = YAML().load(input)
|
||||
output_yaml = input
|
||||
else:
|
||||
data = input
|
||||
output_yaml = safe_dump(input, sort_keys=False)
|
||||
cog.out("\n.. tab:: {}\n\n".format(yaml_title))
|
||||
cog.out(" .. code-block:: yaml\n\n")
|
||||
cog.out(textwrap.indent(output_yaml, " "))
|
||||
cog.out("\n\n.. tab:: {}\n\n".format(json_title))
|
||||
cog.out(" .. code-block:: json\n\n")
|
||||
cog.out(textwrap.indent(json.dumps(data, indent=2), " "))
|
||||
cog.out("\n")
|
||||
|
|
|
@ -40,6 +40,21 @@ The JSON version of this page provides programmatic access to the underlying dat
|
|||
* `fivethirtyeight.datasettes.com/fivethirtyeight.json <https://fivethirtyeight.datasettes.com/fivethirtyeight.json>`_
|
||||
* `global-power-plants.datasettes.com/global-power-plants.json <https://global-power-plants.datasettes.com/global-power-plants.json>`_
|
||||
|
||||
.. _DatabaseView_hidden:
|
||||
|
||||
Hidden tables
|
||||
-------------
|
||||
|
||||
Some tables listed on the database page are treated as hidden. Hidden tables are not completely invisible - they can be accessed through the "hidden tables" link at the bottom of the page. They are hidden because they represent low-level implementation details which are generally not useful to end-users of Datasette.
|
||||
|
||||
The following tables are hidden by default:
|
||||
|
||||
- Any table with a name that starts with an underscore - this is a Datasette convention to help plugins easily hide their own internal tables.
|
||||
- Tables that have been configured as ``"hidden": true`` using :ref:`metadata_hiding_tables`.
|
||||
- ``*_fts`` tables that implement SQLite full-text search indexes.
|
||||
- Tables relating to the inner workings of the SpatiaLite SQLite extension.
|
||||
- ``sqlite_stat`` tables used to store statistics used by the query optimizer.
|
||||
|
||||
.. _TableView:
|
||||
|
||||
Table
|
||||
|
@ -70,10 +85,10 @@ Table cells with extremely long text contents are truncated on the table view ac
|
|||
|
||||
Rows which are the targets of foreign key references from other tables will show a link to a filtered search for all records that reference that row. Here's an example from the Registers of Members Interests database:
|
||||
|
||||
`../people/uk.org.publicwhip%2Fperson%2F10001 <https://register-of-members-interests.datasettes.com/regmem/people/uk.org.publicwhip%2Fperson%2F10001>`_
|
||||
`../people/uk~2Eorg~2Epublicwhip~2Fperson~2F10001 <https://register-of-members-interests.datasettes.com/regmem/people/uk~2Eorg~2Epublicwhip~2Fperson~2F10001>`_
|
||||
|
||||
Note that this URL includes the encoded primary key of the record.
|
||||
|
||||
Here's that same page as JSON:
|
||||
|
||||
`../people/uk.org.publicwhip%2Fperson%2F10001.json <https://register-of-members-interests.datasettes.com/regmem/people/uk.org.publicwhip%2Fperson%2F10001.json>`_
|
||||
`../people/uk~2Eorg~2Epublicwhip~2Fperson~2F10001.json <https://register-of-members-interests.datasettes.com/regmem/people/uk~2Eorg~2Epublicwhip~2Fperson~2F10001.json>`_
|
||||
|
|
|
@ -92,10 +92,17 @@ This function can return an awaitable function if it needs to run any async code
|
|||
|
||||
Examples: `datasette-edit-templates <https://datasette.io/plugins/datasette-edit-templates>`_
|
||||
|
||||
.. _plugin_page_extras:
|
||||
|
||||
Page extras
|
||||
-----------
|
||||
|
||||
These plugin hooks can be used to affect the way HTML pages for different Datasette interfaces are rendered.
|
||||
|
||||
.. _plugin_hook_extra_template_vars:
|
||||
|
||||
extra_template_vars(template, database, table, columns, view_name, request, datasette)
|
||||
--------------------------------------------------------------------------------------
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Extra template variables that should be made available in the rendered template context.
|
||||
|
||||
|
@ -184,7 +191,7 @@ Examples: `datasette-search-all <https://datasette.io/plugins/datasette-search-a
|
|||
.. _plugin_hook_extra_css_urls:
|
||||
|
||||
extra_css_urls(template, database, table, columns, view_name, request, datasette)
|
||||
---------------------------------------------------------------------------------
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
This takes the same arguments as :ref:`extra_template_vars(...) <plugin_hook_extra_template_vars>`
|
||||
|
||||
|
@ -238,7 +245,7 @@ Examples: `datasette-cluster-map <https://datasette.io/plugins/datasette-cluster
|
|||
.. _plugin_hook_extra_js_urls:
|
||||
|
||||
extra_js_urls(template, database, table, columns, view_name, request, datasette)
|
||||
--------------------------------------------------------------------------------
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
This takes the same arguments as :ref:`extra_template_vars(...) <plugin_hook_extra_template_vars>`
|
||||
|
||||
|
@ -270,7 +277,7 @@ you have one:
|
|||
|
||||
Note that ``your-plugin`` here should be the hyphenated plugin name - the name that is displayed in the list on the ``/-/plugins`` debug page.
|
||||
|
||||
If your code uses `JavaScript modules <https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Modules>`__ you should include the ``"module": True`` key. See :ref:`customization_css_and_javascript` for more details.
|
||||
If your code uses `JavaScript modules <https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Modules>`__ you should include the ``"module": True`` key. See :ref:`configuration_reference_css_js` for more details.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
|
@ -288,7 +295,7 @@ Examples: `datasette-cluster-map <https://datasette.io/plugins/datasette-cluster
|
|||
.. _plugin_hook_extra_body_script:
|
||||
|
||||
extra_body_script(template, database, table, columns, view_name, request, datasette)
|
||||
------------------------------------------------------------------------------------
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Extra JavaScript to be added to a ``<script>`` block at the end of the ``<body>`` element on the page.
|
||||
|
||||
|
@ -373,8 +380,7 @@ Let's say you want to build a plugin that adds a ``datasette publish my_hosting_
|
|||
about,
|
||||
about_url,
|
||||
api_key,
|
||||
):
|
||||
...
|
||||
): ...
|
||||
|
||||
Examples: `datasette-publish-fly <https://datasette.io/plugins/datasette-publish-fly>`_, `datasette-publish-vercel <https://datasette.io/plugins/datasette-publish-vercel>`_
|
||||
|
||||
|
@ -488,7 +494,7 @@ This will register ``render_demo`` to be called when paths with the extension ``
|
|||
|
||||
``render_demo`` is a Python function. It can be a regular function or an ``async def render_demo()`` awaitable function, depending on if it needs to make any asynchronous calls.
|
||||
|
||||
``can_render_demo`` is a Python function (or ``async def`` function) which acepts the same arguments as ``render_demo`` but just returns ``True`` or ``False``. It lets Datasette know if the current SQL query can be represented by the plugin - and hence influnce if a link to this output format is displayed in the user interface. If you omit the ``"can_render"`` key from the dictionary every query will be treated as being supported by the plugin.
|
||||
``can_render_demo`` is a Python function (or ``async def`` function) which accepts the same arguments as ``render_demo`` but just returns ``True`` or ``False``. It lets Datasette know if the current SQL query can be represented by the plugin - and hence influence whether a link to this output format is displayed in the user interface. If you omit the ``"can_render"`` key from the dictionary every query will be treated as being supported by the plugin.
|
||||
|
||||
When a request is received, the ``"render"`` callback function is called with zero or more of the following arguments. Datasette will inspect your callback function and pass arguments that match its function signature.
|
||||
|
||||
|
@ -794,24 +800,24 @@ If your plugin needs to register additional permissions unique to that plugin -
|
|||
)
|
||||
]
|
||||
|
||||
The fields of the ``Permission`` named tuple are as follows:
|
||||
The fields of the ``Permission`` class are as follows:
|
||||
|
||||
``name``
|
||||
``name`` - string
|
||||
The name of the permission, e.g. ``upload-csvs``. This should be unique across all plugins that the user might have installed, so choose carefully.
|
||||
|
||||
``abbr``
|
||||
``abbr`` - string or None
|
||||
An abbreviation of the permission, e.g. ``uc``. This is optional - you can set it to ``None`` if you do not want to pick an abbreviation. Since this needs to be unique across all installed plugins it's best not to specify an abbreviation at all. If an abbreviation is provided it will be used when creating restricted signed API tokens.
|
||||
|
||||
``description``
|
||||
``description`` - string or None
|
||||
A human-readable description of what the permission lets you do. Should make sense as the second part of a sentence that starts "A user with this permission can ...".
|
||||
|
||||
``takes_database``
|
||||
``takes_database`` - boolean
|
||||
``True`` if this permission can be granted on a per-database basis, ``False`` if it is only valid at the overall Datasette instance level.
|
||||
|
||||
``takes_resource``
|
||||
``takes_resource`` - boolean
|
||||
``True`` if this permission can be granted on a per-resource basis. A resource is a database table, SQL view or :ref:`canned query <canned_queries>`.
|
||||
|
||||
``default``
|
||||
``default`` - boolean
|
||||
The default value for this permission if it is not explicitly granted to a user. ``True`` means the permission is granted by default, ``False`` means it is not.
|
||||
|
||||
This should only be ``True`` if you want anonymous users to be able to take this action.
|
||||
|
@ -909,7 +915,7 @@ Potential use-cases:
|
|||
|
||||
* Run some initialization code for the plugin
|
||||
* Create database tables that a plugin needs on startup
|
||||
* Validate the metadata configuration for a plugin on startup, and raise an error if it is invalid
|
||||
* Validate the configuration for a plugin on startup, and raise an error if it is invalid
|
||||
|
||||
.. note::
|
||||
|
||||
|
@ -1042,7 +1048,7 @@ Here's an example that authenticates the actor based on an incoming API key:
|
|||
|
||||
If you install this in your plugins directory you can test it like this::
|
||||
|
||||
$ curl -H 'Authorization: Bearer this-is-a-secret' http://localhost:8003/-/actor.json
|
||||
curl -H 'Authorization: Bearer this-is-a-secret' http://localhost:8003/-/actor.json
|
||||
|
||||
Instead of returning a dictionary, this function can return an awaitable function which itself returns either ``None`` or a dictionary. This is useful for authentication functions that need to make a database query - for example:
|
||||
|
||||
|
@ -1071,6 +1077,107 @@ Instead of returning a dictionary, this function can return an awaitable functio
|
|||
|
||||
Examples: `datasette-auth-tokens <https://datasette.io/plugins/datasette-auth-tokens>`_, `datasette-auth-passwords <https://datasette.io/plugins/datasette-auth-passwords>`_
|
||||
|
||||
.. _plugin_hook_actors_from_ids:
|
||||
|
||||
actors_from_ids(datasette, actor_ids)
|
||||
-------------------------------------
|
||||
|
||||
``datasette`` - :ref:`internals_datasette`
|
||||
You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries.
|
||||
|
||||
``actor_ids`` - list of strings or integers
|
||||
The actor IDs to look up.
|
||||
|
||||
The hook must return a dictionary that maps the incoming actor IDs to their full dictionary representation.
|
||||
|
||||
Some plugins that implement social features may store the ID of the :ref:`actor <authentication_actor>` that performed an action - added a comment, bookmarked a table or similar - and then need a way to resolve those IDs into display-friendly actor dictionaries later on.
|
||||
|
||||
The :ref:`await datasette.actors_from_ids(actor_ids) <datasette_actors_from_ids>` internal method can be used to look up actors from their IDs. It will dispatch to the first plugin that implements this hook.
|
||||
|
||||
Unlike other plugin hooks, this only uses the first implementation of the hook to return a result. You can expect users to only have a single plugin installed that implements this hook.
|
||||
|
||||
If no plugin is installed, Datasette defaults to returning actors that are just ``{"id": actor_id}``.
|
||||
|
||||
The hook can return a dictionary or an awaitable function that then returns a dictionary.
|
||||
|
||||
This example implementation returns actors from a database table:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from datasette import hookimpl
|
||||
|
||||
|
||||
@hookimpl
|
||||
def actors_from_ids(datasette, actor_ids):
|
||||
db = datasette.get_database("actors")
|
||||
|
||||
async def inner():
|
||||
sql = "select id, name from actors where id in ({})".format(
|
||||
", ".join("?" for _ in actor_ids)
|
||||
)
|
||||
actors = {}
|
||||
for row in (await db.execute(sql, actor_ids)).rows:
|
||||
actor = dict(row)
|
||||
actors[actor["id"]] = actor
|
||||
return actors
|
||||
|
||||
return inner
|
||||
|
||||
The returned dictionary from this example looks like this:
|
||||
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"1": {"id": "1", "name": "Tony"},
|
||||
"2": {"id": "2", "name": "Tina"},
|
||||
}
|
||||
|
||||
These IDs could be integers or strings, depending on how the actors used by the Datasette instance are configured.
|
||||
|
||||
Example: `datasette-remote-actors <https://github.com/datasette/datasette-remote-actors>`_
|
||||
|
||||
.. _plugin_hook_jinja2_environment_from_request:
|
||||
|
||||
jinja2_environment_from_request(datasette, request, env)
|
||||
--------------------------------------------------------
|
||||
|
||||
``datasette`` - :ref:`internals_datasette`
|
||||
A Datasette instance.
|
||||
|
||||
``request`` - :ref:`internals_request` or ``None``
|
||||
The current HTTP request, if one is available.
|
||||
|
||||
``env`` - ``Environment``
|
||||
The Jinja2 environment that will be used to render the current page.
|
||||
|
||||
This hook can be used to return a customized `Jinja environment <https://jinja.palletsprojects.com/en/3.0.x/api/#jinja2.Environment>`__ based on the incoming request.
|
||||
|
||||
If you want to run a single Datasette instance that serves different content for different domains, you can do so like this:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from datasette import hookimpl
|
||||
from jinja2 import ChoiceLoader, FileSystemLoader
|
||||
|
||||
|
||||
@hookimpl
|
||||
def jinja2_environment_from_request(request, env):
|
||||
if request and request.host == "www.niche-museums.com":
|
||||
return env.overlay(
|
||||
loader=ChoiceLoader(
|
||||
[
|
||||
FileSystemLoader(
|
||||
"/mnt/niche-museums/templates"
|
||||
),
|
||||
env.loader,
|
||||
]
|
||||
),
|
||||
enable_async=True,
|
||||
)
|
||||
return env
|
||||
|
||||
This uses the Jinja `overlay() method <https://jinja.palletsprojects.com/en/3.0.x/api/#jinja2.Environment.overlay>`__ to create a new environment identical to the default environment except for having a different template loader, which first looks in the ``/mnt/niche-museums/templates`` directory before falling back on the default loader.
|
||||
|
||||
.. _plugin_hook_filters_from_request:
|
||||
|
||||
filters_from_request(request, database, table, datasette)
|
||||
|
@ -1330,117 +1437,6 @@ This example logs an error to `Sentry <https://sentry.io/>`__ and then renders a
|
|||
|
||||
Example: `datasette-sentry <https://datasette.io/plugins/datasette-sentry>`_
|
||||
|
||||
.. _plugin_hook_menu_links:
|
||||
|
||||
menu_links(datasette, actor, request)
|
||||
-------------------------------------
|
||||
|
||||
``datasette`` - :ref:`internals_datasette`
|
||||
You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries.
|
||||
|
||||
``actor`` - dictionary or None
|
||||
The currently authenticated :ref:`actor <authentication_actor>`.
|
||||
|
||||
``request`` - :ref:`internals_request` or None
|
||||
The current HTTP request. This can be ``None`` if the request object is not available.
|
||||
|
||||
This hook allows additional items to be included in the menu displayed by Datasette's top right menu icon.
|
||||
|
||||
The hook should return a list of ``{"href": "...", "label": "..."}`` menu items. These will be added to the menu.
|
||||
|
||||
It can alternatively return an ``async def`` awaitable function which returns a list of menu items.
|
||||
|
||||
This example adds a new menu item but only if the signed in user is ``"root"``:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from datasette import hookimpl
|
||||
|
||||
|
||||
@hookimpl
|
||||
def menu_links(datasette, actor):
|
||||
if actor and actor.get("id") == "root":
|
||||
return [
|
||||
{
|
||||
"href": datasette.urls.path(
|
||||
"/-/edit-schema"
|
||||
),
|
||||
"label": "Edit schema",
|
||||
},
|
||||
]
|
||||
|
||||
Using :ref:`internals_datasette_urls` here ensures that links in the menu will take the :ref:`setting_base_url` setting into account.
|
||||
|
||||
Examples: `datasette-search-all <https://datasette.io/plugins/datasette-search-all>`_, `datasette-graphql <https://datasette.io/plugins/datasette-graphql>`_
|
||||
|
||||
.. _plugin_hook_table_actions:
|
||||
|
||||
table_actions(datasette, actor, database, table, request)
|
||||
---------------------------------------------------------
|
||||
|
||||
``datasette`` - :ref:`internals_datasette`
|
||||
You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries.
|
||||
|
||||
``actor`` - dictionary or None
|
||||
The currently authenticated :ref:`actor <authentication_actor>`.
|
||||
|
||||
``database`` - string
|
||||
The name of the database.
|
||||
|
||||
``table`` - string
|
||||
The name of the table.
|
||||
|
||||
``request`` - :ref:`internals_request` or None
|
||||
The current HTTP request. This can be ``None`` if the request object is not available.
|
||||
|
||||
This hook allows table actions to be displayed in a menu accessed via an action icon at the top of the table page. It should return a list of ``{"href": "...", "label": "..."}`` menu items.
|
||||
|
||||
It can alternatively return an ``async def`` awaitable function which returns a list of menu items.
|
||||
|
||||
This example adds a new table action if the signed in user is ``"root"``:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from datasette import hookimpl
|
||||
|
||||
|
||||
@hookimpl
|
||||
def table_actions(datasette, actor, database, table):
|
||||
if actor and actor.get("id") == "root":
|
||||
return [
|
||||
{
|
||||
"href": datasette.urls.path(
|
||||
"/-/edit-schema/{}/{}".format(
|
||||
database, table
|
||||
)
|
||||
),
|
||||
"label": "Edit schema for this table",
|
||||
}
|
||||
]
|
||||
|
||||
Example: `datasette-graphql <https://datasette.io/plugins/datasette-graphql>`_
|
||||
|
||||
.. _plugin_hook_database_actions:
|
||||
|
||||
database_actions(datasette, actor, database, request)
|
||||
-----------------------------------------------------
|
||||
|
||||
``datasette`` - :ref:`internals_datasette`
|
||||
You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries.
|
||||
|
||||
``actor`` - dictionary or None
|
||||
The currently authenticated :ref:`actor <authentication_actor>`.
|
||||
|
||||
``database`` - string
|
||||
The name of the database.
|
||||
|
||||
``request`` - :ref:`internals_request`
|
||||
The current HTTP request.
|
||||
|
||||
This hook is similar to :ref:`plugin_hook_table_actions` but populates an actions menu on the database page.
|
||||
|
||||
Example: `datasette-graphql <https://datasette.io/plugins/datasette-graphql>`_
|
||||
|
||||
.. _plugin_hook_skip_csrf:
|
||||
|
||||
skip_csrf(datasette, scope)
|
||||
|
@ -1510,3 +1506,531 @@ This hook is responsible for returning a dictionary corresponding to Datasette :
|
|||
return metadata
|
||||
|
||||
Example: `datasette-remote-metadata plugin <https://datasette.io/plugins/datasette-remote-metadata>`__

.. _plugin_hook_menu_links:

menu_links(datasette, actor, request)
-------------------------------------

``datasette`` - :ref:`internals_datasette`
    You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries.

``actor`` - dictionary or None
    The currently authenticated :ref:`actor <authentication_actor>`.

``request`` - :ref:`internals_request` or None
    The current HTTP request. This can be ``None`` if the request object is not available.

This hook allows additional items to be included in the menu displayed by Datasette's top right menu icon.

The hook should return a list of ``{"href": "...", "label": "..."}`` menu items. These will be added to the menu.

It can alternatively return an ``async def`` awaitable function which returns a list of menu items.

This example adds a new menu item but only if the signed in user is ``"root"``:

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def menu_links(datasette, actor):
        if actor and actor.get("id") == "root":
            return [
                {
                    "href": datasette.urls.path("/-/edit-schema"),
                    "label": "Edit schema",
                },
            ]

Using :ref:`internals_datasette_urls` here ensures that links in the menu will take the :ref:`setting_base_url` setting into account.

Examples: `datasette-search-all <https://datasette.io/plugins/datasette-search-all>`_, `datasette-graphql <https://datasette.io/plugins/datasette-graphql>`_

.. _plugin_actions:

Action hooks
------------

Action hooks can be used to add items to the action menus that appear at the top of different pages within Datasette. Unlike :ref:`menu_links() <plugin_hook_menu_links>`, which adds items that are displayed on every page, action hooks should only return items that are relevant to the page the user is currently viewing.

Each of these hooks should return a list of ``{"href": "...", "label": "..."}`` menu items, with optional ``"description": "..."`` keys describing each action in more detail.

They can alternatively return an ``async def`` awaitable function which, when called, returns a list of those menu items.

.. _plugin_hook_table_actions:

table_actions(datasette, actor, database, table, request)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

``datasette`` - :ref:`internals_datasette`
    You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries.

``actor`` - dictionary or None
    The currently authenticated :ref:`actor <authentication_actor>`.

``database`` - string
    The name of the database.

``table`` - string
    The name of the table.

``request`` - :ref:`internals_request` or None
    The current HTTP request. This can be ``None`` if the request object is not available.

This example adds a new table action if the signed in user is ``"root"``:

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def table_actions(datasette, actor, database, table):
        if actor and actor.get("id") == "root":
            return [
                {
                    "href": datasette.urls.path(
                        "/-/edit-schema/{}/{}".format(database, table)
                    ),
                    "label": "Edit schema for this table",
                    "description": "Add, remove, rename or alter columns for this table.",
                }
            ]

Example: `datasette-graphql <https://datasette.io/plugins/datasette-graphql>`_

.. _plugin_hook_view_actions:

view_actions(datasette, actor, database, view, request)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

``datasette`` - :ref:`internals_datasette`
    You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries.

``actor`` - dictionary or None
    The currently authenticated :ref:`actor <authentication_actor>`.

``database`` - string
    The name of the database.

``view`` - string
    The name of the SQL view.

``request`` - :ref:`internals_request` or None
    The current HTTP request. This can be ``None`` if the request object is not available.

Like :ref:`plugin_hook_table_actions` but for SQL views.
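
A minimal sketch of an implementation, following the same shape as the ``table_actions`` example above (the ``/-/inspect-view/`` page here is hypothetical, invented for illustration):

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def view_actions(datasette, actor, database, view):
        # Only offer the action to the root user
        if actor and actor.get("id") == "root":
            return [
                {
                    "href": datasette.urls.path(
                        "/-/inspect-view/{}/{}".format(database, view)
                    ),
                    "label": "Inspect this view",
                    "description": "See the SQL that defines this view.",
                }
            ]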

.. _plugin_hook_query_actions:

query_actions(datasette, actor, database, query_name, request, sql, params)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

``datasette`` - :ref:`internals_datasette`
    You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries.

``actor`` - dictionary or None
    The currently authenticated :ref:`actor <authentication_actor>`.

``database`` - string
    The name of the database.

``query_name`` - string or None
    The name of the canned query, or ``None`` if this is an arbitrary SQL query.

``request`` - :ref:`internals_request`
    The current HTTP request.

``sql`` - string
    The SQL query being executed.

``params`` - dictionary
    The parameters passed to the SQL query, if any.

Populates a "Query actions" menu on the canned query and arbitrary SQL query pages.

This example adds a new query action linking to a page for explaining a query:

.. code-block:: python

    from datasette import hookimpl
    import urllib


    @hookimpl
    def query_actions(datasette, database, query_name, sql):
        # Don't explain an explain
        if sql.lower().startswith("explain"):
            return
        return [
            {
                "href": datasette.urls.database(database)
                + "?"
                + urllib.parse.urlencode(
                    {
                        "sql": "explain " + sql,
                    }
                ),
                "label": "Explain this query",
                "description": "Get a summary of how SQLite executes the query",
            },
        ]

Example: `datasette-create-view <https://datasette.io/plugins/datasette-create-view>`_

.. _plugin_hook_row_actions:

row_actions(datasette, actor, request, database, table, row)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

``datasette`` - :ref:`internals_datasette`
    You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries.

``actor`` - dictionary or None
    The currently authenticated :ref:`actor <authentication_actor>`.

``request`` - :ref:`internals_request` or None
    The current HTTP request.

``database`` - string
    The name of the database.

``table`` - string
    The name of the table.

``row`` - ``sqlite.Row``
    The SQLite row object being displayed on the page.

Return links for the "Row actions" menu shown at the top of the row page.

This example displays the row in JSON plus some additional debug information if the user is signed in:

.. code-block:: python

    from datasette import hookimpl
    import json


    @hookimpl
    def row_actions(datasette, database, table, actor, row):
        if actor:
            return [
                {
                    "href": datasette.urls.instance(),
                    "label": f"Row details for {actor['id']}",
                    "description": json.dumps(
                        dict(row), default=repr
                    ),
                },
            ]

.. _plugin_hook_database_actions:

database_actions(datasette, actor, database, request)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

``datasette`` - :ref:`internals_datasette`
    You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries.

``actor`` - dictionary or None
    The currently authenticated :ref:`actor <authentication_actor>`.

``database`` - string
    The name of the database.

``request`` - :ref:`internals_request`
    The current HTTP request.

Populates an actions menu on the database page.

This example adds a new database action for creating a table, if the user has the ``edit-schema`` permission:

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def database_actions(datasette, actor, database):
        async def inner():
            if not await datasette.permission_allowed(
                actor,
                "edit-schema",
                resource=database,
                default=False,
            ):
                return []
            return [
                {
                    "href": datasette.urls.path(
                        "/-/edit-schema/{}/-/create".format(database)
                    ),
                    "label": "Create a table",
                }
            ]

        return inner

Example: `datasette-graphql <https://datasette.io/plugins/datasette-graphql>`_, `datasette-edit-schema <https://datasette.io/plugins/datasette-edit-schema>`_

.. _plugin_hook_homepage_actions:

homepage_actions(datasette, actor, request)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

``datasette`` - :ref:`internals_datasette`
    You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``, or to execute SQL queries.

``actor`` - dictionary or None
    The currently authenticated :ref:`actor <authentication_actor>`.

``request`` - :ref:`internals_request`
    The current HTTP request.

Populates an actions menu on the top-level index homepage of the Datasette instance.

This example adds a link to an imagined tool for editing the homepage, shown only to signed-in users:

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def homepage_actions(datasette, actor):
        if actor:
            return [
                {
                    "href": datasette.urls.path(
                        "/-/customize-homepage"
                    ),
                    "label": "Customize homepage",
                }
            ]

.. _plugin_hook_slots:

Template slots
--------------

The following set of plugin hooks can be used to return extra HTML content that will be inserted into the corresponding page, directly below the ``<h1>`` heading.

Multiple plugins can contribute content here. The order in which it is displayed can be controlled using Pluggy's `call time order options <https://pluggy.readthedocs.io/en/stable/#call-time-order>`__.

Each of these plugin hooks can return either a string or an awaitable function that returns a string.

.. _plugin_hook_top_homepage:

top_homepage(datasette, request)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

``datasette`` - :ref:`internals_datasette`
    You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``.

``request`` - :ref:`internals_request`
    The current HTTP request.

Returns HTML to be displayed at the top of the Datasette homepage.
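
A minimal sketch of an implementation (the welcome message is invented for illustration):

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def top_homepage(datasette, request):
        # The returned HTML is inserted directly below the <h1> heading
        return "<p>Welcome to this Datasette instance!</p>"

The same pattern works for the other template slot hooks below, which add ``database``, ``table``, ``row`` or query arguments as appropriate.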

.. _plugin_hook_top_database:

top_database(datasette, request, database)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

``datasette`` - :ref:`internals_datasette`
    You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``.

``request`` - :ref:`internals_request`
    The current HTTP request.

``database`` - string
    The name of the database.

Returns HTML to be displayed at the top of the database page.

.. _plugin_hook_top_table:

top_table(datasette, request, database, table)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

``datasette`` - :ref:`internals_datasette`
    You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``.

``request`` - :ref:`internals_request`
    The current HTTP request.

``database`` - string
    The name of the database.

``table`` - string
    The name of the table.

Returns HTML to be displayed at the top of the table page.

.. _plugin_hook_top_row:

top_row(datasette, request, database, table, row)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

``datasette`` - :ref:`internals_datasette`
    You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``.

``request`` - :ref:`internals_request`
    The current HTTP request.

``database`` - string
    The name of the database.

``table`` - string
    The name of the table.

``row`` - ``sqlite.Row``
    The SQLite row object being displayed.

Returns HTML to be displayed at the top of the row page.

.. _plugin_hook_top_query:

top_query(datasette, request, database, sql)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

``datasette`` - :ref:`internals_datasette`
    You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``.

``request`` - :ref:`internals_request`
    The current HTTP request.

``database`` - string
    The name of the database.

``sql`` - string
    The SQL query.

Returns HTML to be displayed at the top of the query results page.

.. _plugin_hook_top_canned_query:

top_canned_query(datasette, request, database, query_name)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

``datasette`` - :ref:`internals_datasette`
    You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``.

``request`` - :ref:`internals_request`
    The current HTTP request.

``database`` - string
    The name of the database.

``query_name`` - string
    The name of the canned query.

Returns HTML to be displayed at the top of the canned query page.

.. _plugin_event_tracking:

Event tracking
--------------

Datasette includes an internal mechanism for tracking notable events. This can be used for analytics, but can also be used by plugins that want to listen out for when key events occur (such as a table being created) and take action in response.

Plugins can register to receive events using the ``track_event`` plugin hook.

They can also define their own events for other plugins to receive using the :ref:`register_events() plugin hook <plugin_hook_register_events>`, combined with calls to the :ref:`datasette.track_event() internal method <datasette_track_event>`.

.. _plugin_hook_track_event:

track_event(datasette, event)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

``datasette`` - :ref:`internals_datasette`
    You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``.

``event`` - ``Event``
    Information about the event, represented as an instance of a subclass of the ``Event`` base class.

This hook will be called any time an event is tracked by code that calls the :ref:`datasette.track_event(...) <datasette_track_event>` internal method.

The ``event`` object will always have the following properties:

- ``name``: a string representing the name of the event, for example ``logout`` or ``create-table``.
- ``actor``: a dictionary representing the actor that triggered the event, or ``None`` if the event was not triggered by an actor.
- ``created``: a ``datetime.datetime`` object in the ``timezone.utc`` timezone representing the time the event object was created.

Other properties on the event will be available depending on the type of event. You can also access those as a dictionary using ``event.properties()``.

The events fired by Datasette core are :ref:`documented here <events>`.

This example plugin logs details of all events to standard error:

.. code-block:: python

    from datasette import hookimpl
    import json
    import sys


    @hookimpl
    def track_event(event):
        name = event.name
        actor = event.actor
        properties = event.properties()
        msg = json.dumps(
            {
                "name": name,
                "actor": actor,
                "properties": properties,
            }
        )
        print(msg, file=sys.stderr, flush=True)

Example: `datasette-events-db <https://datasette.io/plugins/datasette-events-db>`_

.. _plugin_hook_register_events:

register_events(datasette)
~~~~~~~~~~~~~~~~~~~~~~~~~~

``datasette`` - :ref:`internals_datasette`
    You can use this to access plugin configuration options via ``datasette.plugin_config(your_plugin_name)``.

This hook should return a list of ``Event`` subclasses that represent custom events that the plugin might send to the :ref:`datasette.track_event() <datasette_track_event>` method.

This example registers event subclasses for ``ban-user`` and ``unban-user`` events:

.. code-block:: python

    from dataclasses import dataclass
    from datasette import hookimpl, Event


    @dataclass
    class BanUserEvent(Event):
        name = "ban-user"
        user: dict


    @dataclass
    class UnbanUserEvent(Event):
        name = "unban-user"
        user: dict


    @hookimpl
    def register_events():
        return [BanUserEvent, UnbanUserEvent]

The plugin can then call ``datasette.track_event(...)`` to send a ``ban-user`` event:

.. code-block:: python

    await datasette.track_event(
        BanUserEvent(user={"id": 1, "username": "cleverbot"})
    )

@@ -81,6 +81,60 @@ You can use the name of a package on PyPI or any of the other valid arguments to

    datasette publish cloudrun mydb.db \
        --install=https://url-to-my-package.zip

.. _plugins_datasette_load_plugins:

Controlling which plugins are loaded
------------------------------------

Datasette defaults to loading every plugin that is installed in the same virtual environment as Datasette itself.

You can set the ``DATASETTE_LOAD_PLUGINS`` environment variable to a comma-separated list of plugin names to load a controlled subset of plugins instead.

For example, to load just the ``datasette-vega`` and ``datasette-cluster-map`` plugins, set ``DATASETTE_LOAD_PLUGINS`` to ``datasette-vega,datasette-cluster-map``:

.. code-block:: bash

    export DATASETTE_LOAD_PLUGINS='datasette-vega,datasette-cluster-map'
    datasette mydb.db

Or:

.. code-block:: bash

    DATASETTE_LOAD_PLUGINS='datasette-vega,datasette-cluster-map' \
        datasette mydb.db

To disable the loading of all additional plugins, set ``DATASETTE_LOAD_PLUGINS`` to an empty string:

.. code-block:: bash

    export DATASETTE_LOAD_PLUGINS=''
    datasette mydb.db

A quick way to test this setting is to use it with the ``datasette plugins`` command:

.. code-block:: bash

    DATASETTE_LOAD_PLUGINS='datasette-vega' datasette plugins

This should output the following:

.. code-block:: json

    [
        {
            "name": "datasette-vega",
            "static": true,
            "templates": false,
            "version": "0.6.2",
            "hooks": [
                "extra_css_urls",
                "extra_js_urls"
            ]
        }
    ]

.. _plugins_installed:

Seeing what plugins are installed
@@ -174,6 +228,15 @@ If you run ``datasette plugins --all`` it will include default plugins that ship

                "skip_csrf"
            ]
        },
        {
            "name": "datasette.events",
            "static": false,
            "templates": false,
            "version": null,
            "hooks": [
                "register_events"
            ]
        },
        {
            "name": "datasette.facets",
            "static": false,
@@ -265,13 +328,13 @@ To write that to a ``requirements.txt`` file, run this::

Plugin configuration
--------------------

-Plugins can have their own configuration, embedded in a :ref:`metadata` file. Configuration options for plugins live within a ``"plugins"`` key in that file, which can be included at the root, database or table level.
+Plugins can have their own configuration, embedded in a :ref:`configuration file <configuration>`. Configuration options for plugins live within a ``"plugins"`` key in that file, which can be included at the root, database or table level.

Here is an example of some plugin configuration for a specific table:

.. [[[cog
-    from metadata_doc import metadata_example
-    metadata_example(cog, {
+    from metadata_doc import config_example
+    config_example(cog, {
        "databases": {
            "sf-trees": {
                "tables": {
@@ -289,7 +352,7 @@ Here is an example of some plugin configuration for a specific table:

    })
.. ]]]

-.. tab:: YAML
+.. tab:: datasette.yaml

    .. code-block:: yaml
@@ -303,7 +366,7 @@ Here is an example of some plugin configuration for a specific table:

            longitude_column: lng

-.. tab:: JSON
+.. tab:: datasette.json

    .. code-block:: json
@@ -332,12 +395,12 @@ This tells the ``datasette-cluster-map`` column which latitude and longitude col

Secret configuration values
~~~~~~~~~~~~~~~~~~~~~~~~~~~

-Any values embedded in ``metadata.yaml`` will be visible to anyone who views the ``/-/metadata`` page of your Datasette instance. Some plugins may need configuration that should stay secret - API keys for example. There are two ways in which you can store secret configuration values.
+Some plugins may need configuration that should stay secret - API keys for example. There are two ways in which you can store secret configuration values.

**As environment variables**. If your secret lives in an environment variable that is available to the Datasette process, you can indicate that the configuration value should be read from that environment variable like so:

.. [[[cog
-    metadata_example(cog, {
+    config_example(cog, {
        "plugins": {
            "datasette-auth-github": {
                "client_secret": {
@@ -348,7 +411,7 @@ Any values embedded in ``metadata.yaml`` will be visible to anyone who views the

    })
.. ]]]

-.. tab:: YAML
+.. tab:: datasette.yaml

    .. code-block:: yaml
@@ -358,7 +421,7 @@ Any values embedded in ``metadata.yaml`` will be visible to anyone who views the

            $env: GITHUB_CLIENT_SECRET

-.. tab:: JSON
+.. tab:: datasette.json

    .. code-block:: json
@@ -376,7 +439,7 @@ Any values embedded in ``metadata.yaml`` will be visible to anyone who views the

**As values in separate files**. Your secrets can also live in files on disk. To specify a secret should be read from a file, provide the full file path like this:

.. [[[cog
-    metadata_example(cog, {
+    config_example(cog, {
        "plugins": {
            "datasette-auth-github": {
                "client_secret": {
@@ -387,7 +450,7 @@ Any values embedded in ``metadata.yaml`` will be visible to anyone who views the

    })
.. ]]]

-.. tab:: YAML
+.. tab:: datasette.yaml

    .. code-block:: yaml
@@ -397,7 +460,7 @@ Any values embedded in ``metadata.yaml`` will be visible to anyone who views the

            $file: /secrets/client-secret

-.. tab:: JSON
+.. tab:: datasette.json

    .. code-block:: json
@@ -423,7 +486,7 @@ If you are publishing your data using the :ref:`datasette publish <cli_publish>`

This will set the necessary environment variables and add the following to the deployed ``metadata.yaml``:

.. [[[cog
-    metadata_example(cog, {
+    config_example(cog, {
        "plugins": {
            "datasette-auth-github": {
                "client_id": {
@@ -437,7 +500,7 @@ This will set the necessary environment variables and add the following to the d

    })
.. ]]]

-.. tab:: YAML
+.. tab:: datasette.yaml

    .. code-block:: yaml
@@ -449,7 +512,7 @@ This will set the necessary environment variables and add the following to the d

            $env: DATASETTE_AUTH_GITHUB_CLIENT_SECRET

-.. tab:: JSON
+.. tab:: datasette.json

    .. code-block:: json
@@ -131,7 +131,7 @@ You can also specify plugins you would like to install. For example, if you want

If a plugin has any :ref:`plugins_configuration_secret` you can use the ``--plugin-secret`` option to set those secrets at publish time. For example, using Heroku with `datasette-auth-github <https://github.com/simonw/datasette-auth-github>`__ you might run the following command::

-    $ datasette publish heroku my_database.db \
+    datasette publish heroku my_database.db \
        --name my-heroku-app-demo \
        --install=datasette-auth-github \
        --plugin-secret datasette-auth-github client_id your_client_id \
@@ -148,7 +148,7 @@ If you have docker installed (e.g. using `Docker for Mac <https://www.docker.com

Here's example output for the package command::

-    $ datasette package parlgov.db --extra-options="--setting sql_time_limit_ms 2500"
+    datasette package parlgov.db --extra-options="--setting sql_time_limit_ms 2500"
    Sending build context to Docker daemon  4.459MB
    Step 1/7 : FROM python:3.11.0-slim-bullseye
     ---> 79e1dc9af1c1
@@ -11,9 +11,11 @@ Datasette supports a number of settings. These can be set using the ``--setting

You can set multiple settings at once like this::

    datasette mydatabase.db \
        --setting default_page_size 50 \
        --setting sql_time_limit_ms 3500 \
        --setting max_returned_rows 2000

+Settings can also be specified :ref:`in the datasette.yaml configuration file <configuration_reference_settings>`.

.. _config_dir:
@@ -22,17 +24,18 @@ Configuration directory mode

Normally you configure Datasette using command-line options. For a Datasette instance with custom templates, custom plugins, a static directory and several databases this can get quite verbose::

-    $ datasette one.db two.db \
+    datasette one.db two.db \
        --metadata=metadata.json \
        --template-dir=templates/ \
        --plugins-dir=plugins \
        --static css:css

As an alternative to this, you can run Datasette in *configuration directory* mode. Create a directory with the following structure::

    # In a directory called my-app:
    my-app/one.db
    my-app/two.db
    my-app/datasette.yaml
    my-app/metadata.json
    my-app/templates/index.html
    my-app/plugins/my_plugin.py
@@ -40,16 +43,16 @@ As an alternative to this, you can run Datasette in *configuration directory* mo

Now start Datasette by providing the path to that directory::

-    $ datasette my-app/
+    datasette my-app/

Datasette will detect the files in that directory and automatically configure itself using them. It will serve all ``*.db`` files that it finds, will load ``metadata.json`` if it exists, and will load the ``templates``, ``plugins`` and ``static`` folders if they are present.

The files that can be included in this directory are as follows. All are optional.

* ``*.db`` (or ``*.sqlite3`` or ``*.sqlite``) - SQLite database files that will be served by Datasette
* ``datasette.yaml`` - :ref:`configuration` for the Datasette instance
* ``metadata.json`` - :ref:`metadata` for those databases - ``metadata.yaml`` or ``metadata.yml`` can be used as well
* ``inspect-data.json`` - the result of running ``datasette inspect *.db --inspect-file=inspect-data.json`` from the configuration directory - any database files listed here will be treated as immutable, so they should not be changed while Datasette is running
* ``settings.json`` - settings that would normally be passed using ``--setting`` - here they should be stored as a JSON object of key/value pairs
* ``templates/`` - a directory containing :ref:`customization_custom_templates`
* ``plugins/`` - a directory containing plugins, see :ref:`writing_plugins_one_off`
* ``static/`` - a directory containing static files - these will be served from ``/static/filename.txt``, see :ref:`customization_static_files`
@@ -72,7 +75,7 @@ Setting this to ``off`` causes permission checks for :ref:`permissions_execute_s

    datasette mydatabase.db --setting default_allow_sql off

-There are two ways to achieve this: the other is to add ``"allow_sql": false`` to your ``metadata.json`` file, as described in :ref:`authentication_permissions_execute_sql`. This setting offers a more convenient way to do this.
+Another way to achieve this is to add ``"allow_sql": false`` to your ``datasette.yaml`` file, as described in :ref:`authentication_permissions_execute_sql`. This setting offers a more convenient way to do this.

.. _setting_default_page_size:
@@ -359,16 +362,16 @@ You can pass a secret to Datasette in two ways: with the ``--secret`` command-li

::

-    $ datasette mydb.db --secret=SECRET_VALUE_HERE
+    datasette mydb.db --secret=SECRET_VALUE_HERE

Or::

-    $ export DATASETTE_SECRET=SECRET_VALUE_HERE
-    $ datasette mydb.db
+    export DATASETTE_SECRET=SECRET_VALUE_HERE
+    datasette mydb.db

One way to generate a secure random secret is to use Python like this::

-    $ python3 -c 'import secrets; print(secrets.token_hex(32))'
+    python3 -c 'import secrets; print(secrets.token_hex(32))'
    cdb19e94283a20f9d42cca50c5a4871c0aa07392db308755d60a1a5b9bb0fa52

Plugin authors make use of this signing mechanism in their plugins using :ref:`datasette_sign` and :ref:`datasette_unsign`.
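
As a rough sketch of how a plugin might use those methods (the namespace and payload here are invented for illustration):

.. code-block:: python

    from datasette.app import Datasette

    datasette = Datasette(memory=True)

    # Sign a value using the instance secret, scoped to a
    # namespace specific to this plugin
    signed = datasette.sign({"id": "example"}, namespace="demo-plugin")

    # unsign() verifies the signature and returns the original value -
    # it raises itsdangerous.BadSignature if the value was tampered with
    value = datasette.unsign(signed, namespace="demo-plugin")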

@@ -156,7 +156,10 @@ The `shapefile format <https://en.wikipedia.org/wiki/Shapefile>`_ is a common fo

Try it now with the North America shapefile available from the University of North Carolina `Global River Database <http://gaia.geosci.unc.edu/rivers/>`_ project. Download the file and unzip it (this will create files called ``narivs.dbf``, ``narivs.prj``, ``narivs.shp`` and ``narivs.shx`` in the current directory), then run the following::

-    $ spatialite rivers-database.db
+    spatialite rivers-database.db

+::

    SpatiaLite version ..: 4.3.0a Supported Extensions:
    ...
    spatialite> .loadshp narivs rivers CP1252 23032
@@ -53,22 +53,29 @@ If you want to bundle some pre-written SQL queries with your Datasette-hosted da

The quickest way to create views is with the SQLite command-line interface::

-    $ sqlite3 sf-trees.db
+    sqlite3 sf-trees.db

+::

    SQLite version 3.19.3 2017-06-27 16:48:08
    Enter ".help" for usage hints.
    sqlite> CREATE VIEW demo_view AS select qSpecies from Street_Tree_List;
    <CTRL+D>

+You can also use the `sqlite-utils <https://sqlite-utils.datasette.io/>`__ tool to `create a view <https://sqlite-utils.datasette.io/en/stable/cli.html#creating-views>`__::
+
+    sqlite-utils create-view sf-trees.db demo_view "select qSpecies from Street_Tree_List"

.. _canned_queries:

Canned queries
--------------

-As an alternative to adding views to your database, you can define canned queries inside your ``metadata.yaml`` file. Here's an example:
+As an alternative to adding views to your database, you can define canned queries inside your ``datasette.yaml`` file. Here's an example:

.. [[[cog
-    from metadata_doc import metadata_example
-    metadata_example(cog, {
+    from metadata_doc import config_example
+    config_example(cog, {
        "databases": {
            "sf-trees": {
                "queries": {
@@ -81,7 +88,7 @@ As an alternative to adding views to your database, you can define canned querie

    })
.. ]]]

-.. tab:: YAML
+.. tab:: datasette.yaml

    .. code-block:: yaml
@@ -92,7 +99,7 @@ As an alternative to adding views to your database, you can define canned querie

            sql: select qSpecies from Street_Tree_List

-.. tab:: JSON
+.. tab:: datasette.json

    .. code-block:: json
@@ -140,11 +147,11 @@ Here's an example of a canned query with a named parameter:

        where neighborhood like '%' || :text || '%'
        order by neighborhood;

-In the canned query metadata looks like this:
+In the canned query configuration this looks like:

.. [[[cog
-    metadata_example(cog, yaml="""
+    config_example(cog, """
    databases:
      fixtures:
        queries:
|
@ -159,10 +166,11 @@ In the canned query metadata looks like this:
|
|||
""")
|
||||
.. ]]]
|
||||
|
||||
.. tab:: YAML
|
||||
.. tab:: datasette.yaml
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
|
||||
databases:
|
||||
fixtures:
|
||||
queries:
|
||||
|
@@ -175,7 +183,8 @@ In the canned query metadata looks like this:

            where neighborhood like '%' || :text || '%'
            order by neighborhood

-.. tab:: JSON
+.. tab:: datasette.json

    .. code-block:: json
@@ -203,7 +212,7 @@ In this example the ``:text`` named parameter is automatically extracted from th

You can alternatively provide an explicit list of named parameters using the ``"params"`` key, like this:

.. [[[cog
-    metadata_example(cog, yaml="""
+    config_example(cog, """
    databases:
      fixtures:
        queries:
@@ -220,10 +229,11 @@ You can alternatively provide an explicit list of named parameters using the ``"

    """)
.. ]]]

-.. tab:: YAML
+.. tab:: datasette.yaml

    .. code-block:: yaml

        databases:
          fixtures:
            queries:
@@ -238,7 +248,8 @@ You can alternatively provide an explicit list of named parameters using the ``"

            where neighborhood like '%' || :text || '%'
            order by neighborhood

-.. tab:: JSON
+.. tab:: datasette.json

    .. code-block:: json
@@ -283,7 +294,7 @@ You can set a default fragment hash that will be included in the link to the can

This example demonstrates both ``fragment`` and ``hide_sql``:

.. [[[cog
-    metadata_example(cog, yaml="""
+    config_example(cog, """
    databases:
      fixtures:
        queries:
@@ -297,10 +308,11 @@ This example demonstrates both ``fragment`` and ``hide_sql``:

    """)
.. ]]]

-.. tab:: YAML
+.. tab:: datasette.yaml

    .. code-block:: yaml

        databases:
          fixtures:
            queries:
@@ -312,7 +324,8 @@ This example demonstrates both ``fragment`` and ``hide_sql``:

            from facetable join facet_cities on facetable.city_id = facet_cities.id
            where neighborhood like '%' || :text || '%' order by neighborhood;

-.. tab:: JSON
+.. tab:: datasette.json

    .. code-block:: json
@@ -343,7 +356,7 @@ Canned queries by default are read-only. You can use the ``"write": true`` key t

See :ref:`authentication_permissions_query` for details on how to add permission checks to canned queries, using the ``"allow"`` key.

.. [[[cog
-    metadata_example(cog, {
+    config_example(cog, {
        "databases": {
            "mydatabase": {
                "queries": {
@@ -357,7 +370,7 @@ See :ref:`authentication_permissions_query` for details on how to add permission

    })
.. ]]]

-.. tab:: YAML
+.. tab:: datasette.yaml

    .. code-block:: yaml
@@ -369,7 +382,7 @@ See :ref:`authentication_permissions_query` for details on how to add permission

            write: true

-.. tab:: JSON
+.. tab:: datasette.json

    .. code-block:: json
@@ -400,7 +413,7 @@ You can customize how Datasette represents success and errors using the followin

For example:

.. [[[cog
-    metadata_example(cog, {
+    config_example(cog, {
        "databases": {
            "mydatabase": {
                "queries": {
@@ -419,7 +432,7 @@ For example:

    })
.. ]]]

-.. tab:: YAML
+.. tab:: datasette.yaml

    .. code-block:: yaml
@@ -437,7 +450,7 @@ For example:

            on_error_redirect: /mydatabase

-.. tab:: JSON
+.. tab:: datasette.json

    .. code-block:: json
@@ -503,7 +516,7 @@ Available magic parameters are:

Here's an example configuration that adds a message from the authenticated user, storing various pieces of additional metadata using magic parameters:

.. [[[cog
-    metadata_example(cog, yaml="""
+    config_example(cog, """
    databases:
      mydatabase:
        queries:
@@ -520,10 +533,11 @@ Here's an example configuration that adds a message from the authenticated user,

    """)
.. ]]]

-.. tab:: YAML
+.. tab:: datasette.yaml

    .. code-block:: yaml

        databases:
          mydatabase:
            queries:
@@ -538,7 +552,8 @@ Here's an example configuration that adds a message from the authenticated user,

                )
            write: true

-.. tab:: JSON
+.. tab:: datasette.json

    .. code-block:: json
@@ -82,6 +82,34 @@ This method registers any :ref:`plugin_hook_startup` or :ref:`plugin_hook_prepar

If you are using ``await datasette.client.get()`` and similar methods then you don't need to worry about this - Datasette automatically calls ``invoke_startup()`` the first time it handles a request.
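
A sketch of that behaviour in a test (assuming ``pytest-asyncio`` style async tests; ``/-/versions.json`` is a built-in Datasette endpoint):

.. code-block:: python

    from datasette.app import Datasette


    async def test_client_triggers_startup():
        ds = Datasette(memory=True)
        # No explicit await ds.invoke_startup() needed here -
        # the first datasette.client request performs it automatically
        response = await ds.client.get("/-/versions.json")
        assert response.status_code == 200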

.. _testing_datasette_client:

Using datasette.client in tests
-------------------------------

The :ref:`internals_datasette_client` mechanism is designed for use in tests. It provides access to a pre-configured `HTTPX async client <https://www.python-httpx.org/async/>`__ instance that can make GET, POST and other HTTP requests against a Datasette instance from inside a test.

A simple test looks like this:

.. literalinclude:: ../tests/test_docs.py
    :language: python
    :start-after: # -- start test_homepage --
    :end-before: # -- end test_homepage --

Or for a JSON API:

.. literalinclude:: ../tests/test_docs.py
    :language: python
    :start-after: # -- start test_actor_is_null --
    :end-before: # -- end test_actor_is_null --

To make requests as an authenticated actor, create a signed ``ds_cookie`` using the ``datasette.client.actor_cookie()`` helper function and pass it in ``cookies=`` like this:

.. literalinclude:: ../tests/test_docs.py
    :language: python
    :start-after: # -- start test_signed_cookie_actor --
    :end-before: # -- end test_signed_cookie_actor --

.. _testing_plugins_pdb:

Using pdb for errors thrown inside Datasette
@@ -285,3 +313,19 @@ When writing tests for plugins you may find it useful to register a test plugin

            assert response.status_code == 500
        finally:
            pm.unregister(name="undo")

To reuse the same temporary plugin in multiple tests, you can register it inside a fixture in your ``conftest.py`` file like this:

.. literalinclude:: ../tests/test_docs_plugins.py
    :language: python
    :start-after: # -- start datasette_with_plugin_fixture --
    :end-before: # -- end datasette_with_plugin_fixture --

Note the ``yield`` statement here - this ensures that the ``finally:`` block that unregisters the plugin is executed only after the test function itself has completed.

Then in a test:

.. literalinclude:: ../tests/test_docs_plugins.py
    :language: python
    :start-after: # -- start datasette_with_plugin_test --
    :end-before: # -- end datasette_with_plugin_test --
@@ -7,6 +7,30 @@ You can write one-off plugins that apply to just one Datasette instance, or you

Want to start by looking at an example? The `Datasette plugins directory <https://datasette.io/plugins>`__ lists more than 90 open source plugins with code you can explore. The :ref:`plugin hooks <plugin_hooks>` page includes links to example plugins for each of the documented hooks.

.. _writing_plugins_tracing:

Tracing plugin hooks
--------------------

The ``DATASETTE_TRACE_PLUGINS`` environment variable turns on detailed tracing showing exactly which hooks are being run. This can be useful for understanding how Datasette is using your plugin.

.. code-block:: bash

    DATASETTE_TRACE_PLUGINS=1 datasette mydb.db

Example output::

    actor_from_request:
    {   'datasette': <datasette.app.Datasette object at 0x100bc7220>,
        'request': <asgi.Request method="GET" url="http://127.0.0.1:4433/">}
    Hook implementations:
    [   <HookImpl plugin_name='codespaces', plugin=<module 'datasette_codespaces' from '.../site-packages/datasette_codespaces/__init__.py'>>,
        <HookImpl plugin_name='datasette.actor_auth_cookie', plugin=<module 'datasette.actor_auth_cookie' from '.../datasette/datasette/actor_auth_cookie.py'>>,
        <HookImpl plugin_name='datasette.default_permissions', plugin=<module 'datasette.default_permissions' from '.../datasette/default_permissions.py'>>]
    Results:
    [{'id': 'root'}]

.. _writing_plugins_one_off:

Writing one-off plugins
@@ -184,7 +208,7 @@ This will return the ``{"latitude_column": "lat", "longitude_column": "lng"}`` i

If there is no configuration for that plugin, the method will return ``None``.

-If it cannot find the requested configuration at the table layer, it will fall back to the database layer and then the root layer. For example, a user may have set the plugin configuration option like so:
+If it cannot find the requested configuration at the table layer, it will fall back to the database layer and then the root layer. For example, a user may have set the plugin configuration option inside ``datasette.yaml`` like so:

.. [[[cog
    from metadata_doc import metadata_example
@@ -202,7 +226,7 @@ If it cannot find the requested configuration at the table layer, it will fall b

    })
.. ]]]

-.. tab:: YAML
+.. tab:: metadata.yaml

    .. code-block:: yaml
@@ -214,7 +238,7 @@ If it cannot find the requested configuration at the table layer, it will fall b

            longitude_column: xlng

-.. tab:: JSON
+.. tab:: metadata.json

    .. code-block:: json
@@ -234,11 +258,10 @@ If it cannot find the requested configuration at the table layer, it will fall b

In this case, the above code would return that configuration for ANY table within the ``sf-trees`` database.

-The plugin configuration could also be set at the top level of ``metadata.yaml``:
+The plugin configuration could also be set at the top level of ``datasette.yaml``:

.. [[[cog
    metadata_example(cog, {
        "title": "This is the top-level title in metadata.json",
        "plugins": {
            "datasette-cluster-map": {
                "latitude_column": "xlat",
@@ -248,23 +271,21 @@ The plugin configuration could also be set at the top level of ``metadata.yaml``

    })
.. ]]]

-.. tab:: YAML
+.. tab:: metadata.yaml

    .. code-block:: yaml

        title: This is the top-level title in metadata.json
        plugins:
          datasette-cluster-map:
            latitude_column: xlat
            longitude_column: xlng

-.. tab:: JSON
+.. tab:: metadata.json

    .. code-block:: json

        {
            "title": "This is the top-level title in metadata.json",
            "plugins": {
                "datasette-cluster-map": {
                    "latitude_column": "xlat",
@@ -325,3 +346,65 @@ This object is exposed in templates as the ``urls`` variable, which can be used

    Back to the <a href="{{ urls.instance() }}">Homepage</a>

See :ref:`internals_datasette_urls` for full details on this object.

.. _writing_plugins_extra_hooks:

Plugins that define new plugin hooks
------------------------------------

Plugins can define new plugin hooks that other plugins can use to further extend their functionality.

`datasette-graphql <https://github.com/simonw/datasette-graphql>`__ is one example of a plugin that does this. It defines a new hook called ``graphql_extra_fields``, `described here <https://github.com/simonw/datasette-graphql/blob/main/README.md#adding-custom-fields-with-plugins>`__, which other plugins can use to define additional fields that should be included in the GraphQL schema.

To define additional hooks, add a file to the plugin called ``datasette_your_plugin/hookspecs.py`` with content that looks like this:

.. code-block:: python

    from pluggy import HookspecMarker

    hookspec = HookspecMarker("datasette")


    @hookspec
    def name_of_your_hook_goes_here(datasette):
        "Description of your hook."

You should define your own hook name and arguments here, following the documentation for `Pluggy specifications <https://pluggy.readthedocs.io/en/stable/#specs>`__. Make sure to pick a name that is unlikely to clash with hooks provided by any other plugins.

Then, to register your plugin hooks, add the following code to your ``datasette_your_plugin/__init__.py`` file:

.. code-block:: python

    from datasette.plugins import pm
    from . import hookspecs

    pm.add_hookspecs(hookspecs)

This will register your plugin hooks as part of the ``datasette`` plugin hook namespace.

Within your plugin code you can trigger the hook using this pattern:

.. code-block:: python

    from datasette.plugins import pm

    for (
        plugin_return_value
    ) in pm.hook.name_of_your_hook_goes_here(
        datasette=datasette
    ):
        # Do something with plugin_return_value
        pass

Other plugins will then be able to register their own implementations of your hook using this syntax:

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def name_of_your_hook_goes_here(datasette):
        return "Response from this plugin hook"

These plugin implementations can accept 0 or more of the named arguments that you defined in your hook specification.
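
For example, this implementation declares none of them at all - Pluggy only passes the arguments that an implementation asks for:

.. code-block:: python

    from datasette import hookimpl


    @hookimpl
    def name_of_your_hook_goes_here():
        # No arguments declared, so none are passed in
        return "Response from this plugin hook"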

@@ -0,0 +1 @@
+line-length = 160

setup.py
@@ -44,10 +44,12 @@ setup(
    install_requires=[
        "asgiref>=3.2.10",
        "click>=7.1.1",
-       "click-default-group-wheel>=1.2.2",
+       "click-default-group>=1.2.3",
        "Jinja2>=2.10.3",
        "hupper>=1.9",
        "httpx>=0.20",
+       'importlib_resources>=1.3.1; python_version < "3.9"',
+       'importlib_metadata>=4.6; python_version < "3.10"',
        "pint>=0.9",
        "pluggy>=1.0",
        "uvicorn>=0.11",

@@ -66,11 +68,10 @@ setup(
        [console_scripts]
        datasette=datasette.cli:cli
    """,
-   setup_requires=["pytest-runner"],
    extras_require={
        "docs": [
-           "Sphinx==7.1.2",
-           "furo==2023.7.26",
+           "Sphinx==7.2.6",
+           "furo==2024.1.29",
            "sphinx-autobuild",
            "codespell>=2.2.5",
            "blacken-docs",

@@ -83,15 +84,14 @@ setup(
            "pytest-xdist>=2.2.1",
            "pytest-asyncio>=0.17",
            "beautifulsoup4>=4.8.1",
-           "black==23.7.0",
-           "blacken-docs==1.15.0",
+           "black==24.2.0",
+           "blacken-docs==1.16.0",
            "pytest-timeout>=1.4.2",
            "trustme>=0.7",
            "cogapp>=3.3.0",
        ],
        "rich": ["rich"],
    },
-   tests_require=["datasette[test]"],
    classifiers=[
        "Development Status :: 4 - Beta",
        "Framework :: Datasette",
@@ -1,4 +1,3 @@
-import asyncio
import httpx
import os
import pathlib

@@ -8,7 +7,8 @@ import re
import subprocess
import tempfile
import time
import trustme
+from dataclasses import dataclass
+from datasette import Event, hookimpl


try:

@@ -41,7 +41,7 @@ def wait_until_responds(url, timeout=5.0, client=httpx, **kwargs):
@pytest_asyncio.fixture
async def ds_client():
    from datasette.app import Datasette
-   from .fixtures import METADATA, PLUGINS_DIR
+   from .fixtures import CONFIG, METADATA, PLUGINS_DIR

    global _ds_client
    if _ds_client is not None:

@@ -49,6 +49,7 @@ async def ds_client():

    ds = Datasette(
        metadata=METADATA,
+       config=CONFIG,
        plugins_dir=PLUGINS_DIR,
        settings={
            "default_page_size": 50,

@@ -163,6 +164,35 @@ def check_permission_actions_are_documented():
    )


class TrackEventPlugin:
    __name__ = "TrackEventPlugin"

    @dataclass
    class OneEvent(Event):
        name = "one"

        extra: str

    @hookimpl
    def register_events(self, datasette):
        async def inner():
            return [self.OneEvent]

        return inner

    @hookimpl
    def track_event(self, datasette, event):
        datasette._tracked_events = getattr(datasette, "_tracked_events", [])
        datasette._tracked_events.append(event)


@pytest.fixture(scope="session", autouse=True)
def install_event_tracking_plugin():
    from datasette.plugins import pm

    pm.register(TrackEventPlugin(), name="TrackEventPlugin")


@pytest.fixture(scope="session")
def ds_localhost_http_server():
    ds_proc = subprocess.Popen(
@@ -42,18 +42,22 @@ EXPECTED_PLUGINS = [
            "extra_js_urls",
            "extra_template_vars",
            "forbidden",
+           "homepage_actions",
            "menu_links",
            "permission_allowed",
            "prepare_connection",
            "prepare_jinja2_environment",
+           "query_actions",
            "register_facet_classes",
            "register_magic_parameters",
            "register_permissions",
            "register_routes",
            "render_cell",
+           "row_actions",
            "skip_csrf",
            "startup",
            "table_actions",
+           "view_actions",
        ],
    },
    {

@@ -114,6 +118,7 @@ def make_app_client(
    inspect_data=None,
    static_mounts=None,
    template_dir=None,
+   config=None,
    metadata=None,
    crossdb=False,
):

@@ -158,6 +163,7 @@ def make_app_client(
        memory=memory,
        cors=cors,
        metadata=metadata or METADATA,
+       config=config or CONFIG,
        plugins_dir=PLUGINS_DIR,
        settings=settings,
        inspect_data=inspect_data,
@@ -296,16 +302,7 @@ def generate_sortable_rows(num):
    }


-METADATA = {
-    "title": "Datasette Fixtures",
-    "description_html": 'An example SQLite database demonstrating Datasette. <a href="/login-as-root">Sign in as root user</a>',
-    "license": "Apache License 2.0",
-    "license_url": "https://github.com/simonw/datasette/blob/main/LICENSE",
-    "source": "tests/fixtures.py",
-    "source_url": "https://github.com/simonw/datasette/blob/main/tests/fixtures.py",
-    "about": "About Datasette",
-    "about_url": "https://github.com/simonw/datasette",
-    "extra_css_urls": ["/static/extra-css-urls.css"],
+CONFIG = {
    "plugins": {
        "name-of-plugin": {"depth": "root"},
        "env-plugin": {"foo": {"$env": "FOO_ENV"}},

@@ -314,12 +311,9 @@ METADATA = {
    },
    "databases": {
        "fixtures": {
-           "description": "Test tables description",
            "plugins": {"name-of-plugin": {"depth": "database"}},
            "tables": {
                "simple_primary_key": {
-                   "description_html": "Simple <em>primary</em> key",
-                   "title": "This <em>HTML</em> is escaped",
                    "plugins": {
                        "name-of-plugin": {
                            "depth": "table",
@@ -328,33 +322,8 @@ METADATA = {
                },
            },
            "sortable": {
-               "sortable_columns": [
-                   "sortable",
-                   "sortable_with_nulls",
-                   "sortable_with_nulls_2",
-                   "text",
-               ],
                "plugins": {"name-of-plugin": {"depth": "table"}},
            },
-           "no_primary_key": {"sortable_columns": [], "hidden": True},
-           "units": {"units": {"distance": "m", "frequency": "Hz"}},
-           "primary_key_multiple_columns_explicit_label": {
-               "label_column": "content2"
-           },
-           "simple_view": {"sortable_columns": ["content"]},
-           "searchable_view_configured_by_metadata": {
-               "fts_table": "searchable_fts",
-               "fts_pk": "pk",
-           },
-           "roadside_attractions": {
-               "columns": {
-                   "name": "The name of the attraction",
-                   "address": "The street address for the attraction",
-               }
-           },
-           "attraction_characteristic": {"sort_desc": "pk"},
-           "facet_cities": {"sort": "name"},
-           "paginated_view": {"size": 25},
        },
        "queries": {
            "𝐜𝐢𝐭𝐢𝐞𝐬": "select id, name from facet_cities order by id limit 1;",
@@ -381,6 +350,56 @@ METADATA = {
            },
        }
    },
    "extra_css_urls": ["/static/extra-css-urls.css"],
}

METADATA = {
    "title": "Datasette Fixtures",
    "description_html": 'An example SQLite database demonstrating Datasette. <a href="/login-as-root">Sign in as root user</a>',
    "license": "Apache License 2.0",
    "license_url": "https://github.com/simonw/datasette/blob/main/LICENSE",
    "source": "tests/fixtures.py",
    "source_url": "https://github.com/simonw/datasette/blob/main/tests/fixtures.py",
    "about": "About Datasette",
    "about_url": "https://github.com/simonw/datasette",
    "databases": {
        "fixtures": {
            "description": "Test tables description",
            "tables": {
                "simple_primary_key": {
                    "description_html": "Simple <em>primary</em> key",
                    "title": "This <em>HTML</em> is escaped",
                },
                "sortable": {
                    "sortable_columns": [
                        "sortable",
                        "sortable_with_nulls",
                        "sortable_with_nulls_2",
                        "text",
                    ],
                },
                "no_primary_key": {"sortable_columns": [], "hidden": True},
                "units": {"units": {"distance": "m", "frequency": "Hz"}},
                "primary_key_multiple_columns_explicit_label": {
                    "label_column": "content2"
                },
                "simple_view": {"sortable_columns": ["content"]},
                "searchable_view_configured_by_metadata": {
                    "fts_table": "searchable_fts",
                    "fts_pk": "pk",
                },
                "roadside_attractions": {
                    "columns": {
                        "name": "The name of the attraction",
                        "address": "The street address for the attraction",
                    }
                },
                "attraction_characteristic": {"sort_desc": "pk"},
                "facet_cities": {"sort": "name"},
                "paginated_view": {"size": 25},
            },
        }
    },
}

TABLES = (
@ -767,6 +786,7 @@ def assert_permissions_checked(datasette, actions):
|
|||
default="fixtures.db",
|
||||
type=click.Path(file_okay=True, dir_okay=False),
|
||||
)
|
||||
@click.argument("config", required=False)
|
||||
@click.argument("metadata", required=False)
|
||||
@click.argument(
|
||||
"plugins_path", type=click.Path(file_okay=False, dir_okay=True), required=False
|
||||
|
@ -782,7 +802,7 @@ def assert_permissions_checked(datasette, actions):
|
|||
type=click.Path(file_okay=True, dir_okay=False),
|
||||
help="Write out second test DB to this file",
|
||||
)
|
||||
def cli(db_filename, metadata, plugins_path, recreate, extra_db_filename):
|
||||
def cli(db_filename, config, metadata, plugins_path, recreate, extra_db_filename):
|
||||
"""Write out the fixtures database used by Datasette's test suite"""
|
||||
if metadata and not metadata.endswith(".json"):
|
||||
raise click.ClickException("Metadata should end with .json")
|
||||
|
@ -805,6 +825,10 @@ def cli(db_filename, metadata, plugins_path, recreate, extra_db_filename):
|
|||
with open(metadata, "w") as fp:
|
||||
fp.write(json.dumps(METADATA, indent=4))
|
||||
print(f"- metadata written to {metadata}")
|
||||
if config:
|
||||
with open(config, "w") as fp:
|
||||
fp.write(json.dumps(CONFIG, indent=4))
|
||||
print(f"- config written to {config}")
|
||||
if plugins_path:
|
||||
path = pathlib.Path(plugins_path)
|
||||
if not path.exists():
|
||||
|
|
|
@@ -7,6 +7,7 @@ from datasette.utils.asgi import asgi_send_json, Response
import base64
import pint
import json
import urllib

ureg = pint.UnitRegistry()


@@ -39,9 +40,9 @@ def extra_css_urls(template, database, table, view_name, columns, request, datasette):
                    "database": database,
                    "table": table,
                    "view_name": view_name,
                    "request_path": request.path
                    if request is not None
                    else None,
                    "request_path": (
                        request.path if request is not None else None
                    ),
                    "added": (
                        await datasette.get_database().execute("select 3 * 5")
                    ).first()[0],

@@ -390,6 +391,50 @@ def table_actions(datasette, database, table, actor):
        ]


@hookimpl
def view_actions(datasette, database, view, actor):
    if actor:
        return [
            {
                "href": datasette.urls.instance(),
                "label": f"Database: {database}",
            },
            {"href": datasette.urls.instance(), "label": f"View: {view}"},
        ]


@hookimpl
def query_actions(datasette, database, query_name, sql):
    # Don't explain an explain
    if sql.lower().startswith("explain"):
        return
    return [
        {
            "href": datasette.urls.database(database)
            + "?"
            + urllib.parse.urlencode(
                {
                    "sql": "explain " + sql,
                }
            ),
            "label": "Explain this query",
            "description": "Runs a SQLite explain",
        },
    ]


@hookimpl
def row_actions(datasette, database, table, actor, row):
    if actor:
        return [
            {
                "href": datasette.urls.instance(),
                "label": f"Row details for {actor['id']}",
                "description": json.dumps(dict(row), default=repr),
            },
        ]


@hookimpl
def database_actions(datasette, database, actor, request):
    if actor:

@@ -404,6 +449,18 @@ def database_actions(datasette, database, actor, request):
        ]


@hookimpl
def homepage_actions(datasette, actor, request):
    if actor:
        label = f"Custom homepage for: {actor['id']}"
        return [
            {
                "href": datasette.urls.path("/-/custom-homepage"),
                "label": label,
            }
        ]


@hookimpl
def skip_csrf(scope):
    return scope["path"] == "/skip-csrf"

@@ -120,7 +120,7 @@ def permission_allowed(datasette, actor, action):
    assert (
        2
        == (
            await datasette.get_database("_internal").execute("select 1 + 1")
            await datasette.get_internal_database().execute("select 1 + 1")
        ).first()[0]
    )
    if action == "this_is_allowed_async":

@@ -142,7 +142,8 @@ def startup(datasette):
    async def inner():
        # Run against _internal so tests that use the ds_client fixture
        # (which has no databases yet on startup) do not fail:
        result = await datasette.get_database("_internal").execute("select 1 + 1")
        internal_db = datasette.get_internal_database()
        result = await internal_db.execute("select 1 + 1")
        datasette._startup_hook_calculation = result.first()[0]

    return inner

@@ -0,0 +1,26 @@
#!/bin/bash
# This should only run in environments where both
# datasette-init and datasette-json-html are installed

PLUGINS=$(datasette plugins)
if ! echo "$PLUGINS" | jq 'any(.[]; .name == "datasette-json-html")' | grep -q true; then
  echo "Test failed: datasette-json-html not found"
  exit 1
fi

PLUGINS2=$(DATASETTE_LOAD_PLUGINS=datasette-init datasette plugins)
if ! echo "$PLUGINS2" | jq 'any(.[]; .name == "datasette-json-html")' | grep -q false; then
  echo "Test failed: datasette-json-html should not have been loaded"
  exit 1
fi

if ! echo "$PLUGINS2" | jq 'any(.[]; .name == "datasette-init")' | grep -q true; then
  echo "Test failed: datasette-init should have been loaded"
  exit 1
fi

PLUGINS3=$(DATASETTE_LOAD_PLUGINS='' datasette plugins)
if ! echo "$PLUGINS3" | grep -q '\[\]'; then
  echo "Test failed: datasette plugins should have returned []"
  exit 1
fi

@@ -771,7 +771,7 @@ def test_databases_json(app_client_two_attached_databases_one_immutable):
@pytest.mark.asyncio
async def test_metadata_json(ds_client):
    response = await ds_client.get("/-/metadata.json")
    assert response.json() == METADATA
    assert response.json() == ds_client.ds.metadata()


@pytest.mark.asyncio

@@ -780,13 +780,22 @@ async def test_threads_json(ds_client):
    expected_keys = {"threads", "num_threads"}
    if sys.version_info >= (3, 7, 0):
        expected_keys.update({"tasks", "num_tasks"})
    assert set(response.json().keys()) == expected_keys
    data = response.json()
    assert set(data.keys()) == expected_keys
    # Should be at least one _execute_writes thread for __INTERNAL__
    thread_names = [thread["name"] for thread in data["threads"]]
    assert "_execute_writes for database __INTERNAL__" in thread_names


@pytest.mark.asyncio
async def test_plugins_json(ds_client):
    response = await ds_client.get("/-/plugins.json")
    assert EXPECTED_PLUGINS == sorted(response.json(), key=lambda p: p["name"])
    # Filter out TrackEventPlugin
    actual_plugins = sorted(
        [p for p in response.json() if p["name"] != "TrackEventPlugin"],
        key=lambda p: p["name"],
    )
    assert EXPECTED_PLUGINS == actual_plugins
    # Try with ?all=1
    response = await ds_client.get("/-/plugins.json?all=1")
    names = {p["name"] for p in response.json()}

@@ -841,20 +850,6 @@ async def test_settings_json(ds_client):
    }


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "path,expected_redirect",
    (
        ("/-/config.json", "/-/settings.json"),
        ("/-/config", "/-/settings"),
    ),
)
async def test_config_redirects_to_settings(ds_client, path, expected_redirect):
    response = await ds_client.get(path)
    assert response.status_code == 301
    assert response.headers["Location"] == expected_redirect


test_json_columns_default_expected = [
    {"intval": 1, "strval": "s", "floatval": 0.5, "jsonval": '{"foo": "bar"}'}
]

@@ -1017,7 +1012,25 @@ async def test_hidden_sqlite_stat1_table():
    await db.execute_write("analyze")
    data = (await ds.client.get("/db.json?_show_hidden=1")).json()
    tables = [(t["name"], t["hidden"]) for t in data["tables"]]
    assert tables == [("normal", False), ("sqlite_stat1", True)]
    assert tables in (
        [("normal", False), ("sqlite_stat1", True)],
        [("normal", False), ("sqlite_stat1", True), ("sqlite_stat4", True)],
    )


@pytest.mark.asyncio
async def test_hide_tables_starting_with_underscore():
    ds = Datasette()
    db = ds.add_memory_database("test_hide_tables_starting_with_underscore")
    await db.execute_write("create table normal (id integer primary key, name text)")
    await db.execute_write("create table _hidden (id integer primary key, name text)")
    data = (
        await ds.client.get(
            "/test_hide_tables_starting_with_underscore.json?_show_hidden=1"
        )
    ).json()
    tables = [(t["name"], t["hidden"]) for t in data["tables"]]
    assert tables == [("normal", False), ("_hidden", True)]


@pytest.mark.asyncio

@@ -1031,3 +1044,138 @@ async def test_tilde_encoded_database_names(db_name):
    # And the JSON for that database
    response2 = await ds.client.get(path + ".json")
    assert response2.status_code == 200


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "config,expected",
    (
        ({}, {}),
        ({"plugins": {"datasette-foo": "bar"}}, {"plugins": {"datasette-foo": "bar"}}),
        # Test redaction
        (
            {
                "plugins": {
                    "datasette-auth": {"secret_key": "key"},
                    "datasette-foo": "bar",
                    "datasette-auth2": {"password": "password"},
                    "datasette-sentry": {
                        "dsn": "sentry:///foo",
                    },
                }
            },
            {
                "plugins": {
                    "datasette-auth": {"secret_key": "***"},
                    "datasette-foo": "bar",
                    "datasette-auth2": {"password": "***"},
                    "datasette-sentry": {"dsn": "***"},
                }
            },
        ),
    ),
)
async def test_config_json(config, expected):
    "/-/config.json should return redacted configuration"
    ds = Datasette(config=config)
    response = await ds.client.get("/-/config.json")
    assert response.json() == expected


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "metadata,expected_config,expected_metadata",
    (
        ({}, {}, {}),
        (
            # Metadata input
            {
                "title": "Datasette Fixtures",
                "databases": {
                    "fixtures": {
                        "tables": {
                            "sortable": {
                                "sortable_columns": [
                                    "sortable",
                                    "sortable_with_nulls",
                                    "sortable_with_nulls_2",
                                    "text",
                                ],
                            },
                            "no_primary_key": {"sortable_columns": [], "hidden": True},
                            "units": {"units": {"distance": "m", "frequency": "Hz"}},
                            "primary_key_multiple_columns_explicit_label": {
                                "label_column": "content2"
                            },
                            "simple_view": {"sortable_columns": ["content"]},
                            "searchable_view_configured_by_metadata": {
                                "fts_table": "searchable_fts",
                                "fts_pk": "pk",
                            },
                            "roadside_attractions": {
                                "columns": {
                                    "name": "The name of the attraction",
                                    "address": "The street address for the attraction",
                                }
                            },
                            "attraction_characteristic": {"sort_desc": "pk"},
                            "facet_cities": {"sort": "name"},
                            "paginated_view": {"size": 25},
                        },
                    }
                },
            },
            # Should produce a config with just the table configuration keys
            {
                "databases": {
                    "fixtures": {
                        "tables": {
                            "sortable": {
                                "sortable_columns": [
                                    "sortable",
                                    "sortable_with_nulls",
                                    "sortable_with_nulls_2",
                                    "text",
                                ]
                            },
                            "units": {"units": {"distance": "m", "frequency": "Hz"}},
                            # These ones get redacted:
                            "no_primary_key": "***",
                            "primary_key_multiple_columns_explicit_label": "***",
                            "simple_view": {"sortable_columns": ["content"]},
                            "searchable_view_configured_by_metadata": {
                                "fts_table": "searchable_fts",
                                "fts_pk": "pk",
                            },
                            "attraction_characteristic": {"sort_desc": "pk"},
                            "facet_cities": {"sort": "name"},
                            "paginated_view": {"size": 25},
                        }
                    }
                }
            },
            # And metadata with everything else
            {
                "title": "Datasette Fixtures",
                "databases": {
                    "fixtures": {
                        "tables": {
                            "roadside_attractions": {
                                "columns": {
                                    "name": "The name of the attraction",
                                    "address": "The street address for the attraction",
                                }
                            },
                        }
                    }
                },
            },
        ),
    ),
)
async def test_upgrade_metadata(metadata, expected_config, expected_metadata):
    ds = Datasette(metadata=metadata)
    response = await ds.client.get("/-/config.json")
    assert response.json() == expected_config
    response2 = await ds.client.get("/-/metadata.json")
    assert response2.json() == expected_metadata

@@ -1,5 +1,6 @@
from datasette.app import Datasette
from datasette.utils import sqlite3
from .utils import last_event
import pytest
import time

@@ -49,6 +50,35 @@ async def test_insert_row(ds_write):
    assert response.json()["rows"] == [expected_row]
    rows = (await ds_write.get_database("data").execute("select * from docs")).rows
    assert dict(rows[0]) == expected_row
    # Analytics event
    event = last_event(ds_write)
    assert event.name == "insert-rows"
    assert event.num_rows == 1
    assert event.database == "data"
    assert event.table == "docs"
    assert not event.ignore
    assert not event.replace


@pytest.mark.asyncio
async def test_insert_row_alter(ds_write):
    token = write_token(ds_write)
    response = await ds_write.client.post(
        "/data/docs/-/insert",
        json={
            "row": {"title": "Test", "score": 1.2, "age": 5, "extra": "extra"},
            "alter": True,
        },
        headers=_headers(token),
    )
    assert response.status_code == 201
    assert response.json()["ok"] is True
    assert response.json()["rows"][0]["extra"] == "extra"
    # Analytics event
    event = last_event(ds_write)
    assert event.name == "alter-table"
    assert "extra" not in event.before_schema
    assert "extra" in event.after_schema


@pytest.mark.asyncio

@@ -68,6 +98,16 @@ async def test_insert_rows(ds_write, return_rows):
        headers=_headers(token),
    )
    assert response.status_code == 201

    # Analytics event
    event = last_event(ds_write)
    assert event.name == "insert-rows"
    assert event.num_rows == 20
    assert event.database == "data"
    assert event.table == "docs"
    assert not event.ignore
    assert not event.replace

    actual_rows = [
        dict(r)
        for r in (

@@ -181,6 +221,14 @@ async def test_insert_rows(ds_write, return_rows):
            400,
            ['Cannot use "ignore" and "replace" at the same time'],
        ),
        (
            # Replace is not allowed if you don't have update-row
            "/data/docs/-/insert",
            {"rows": [{"title": "Test"}], "replace": True},
            "insert-but-not-update",
            403,
            ['Permission denied: need update-row to use "replace"'],
        ),
        (
            "/data/docs/-/insert",
            {"rows": [{"title": "Test"}], "invalid_param": True},

@@ -259,16 +307,27 @@ async def test_insert_rows(ds_write, return_rows):
            403,
            ["Permission denied: need both insert-row and update-row"],
        ),
        # Alter table forbidden without alter permission
        (
            "/data/docs/-/upsert",
            {"rows": [{"id": 1, "title": "One", "extra": "extra"}], "alter": True},
            "update-and-insert-but-no-alter",
            403,
            ["Permission denied for alter-table"],
        ),
    ),
)
async def test_insert_or_upsert_row_errors(
    ds_write, path, input, special_case, expected_status, expected_errors
):
    token = write_token(ds_write)
    token_permissions = []
    if special_case == "insert-but-not-update":
        token = write_token(ds_write, permissions=["ir", "vi"])
        token_permissions = ["ir", "vi"]
    if special_case == "update-but-not-insert":
        token = write_token(ds_write, permissions=["ur", "vi"])
        token_permissions = ["ur", "vi"]
    if special_case == "update-and-insert-but-no-alter":
        token_permissions = ["ur", "ir"]
    token = write_token(ds_write, permissions=token_permissions)
    if special_case == "duplicate_id":
        await ds_write.get_database("data").execute_write(
            "insert into docs (id) values (1)"

@@ -279,16 +338,20 @@ async def test_insert_or_upsert_row_errors(
        json=input,
        headers={
            "Authorization": "Bearer {}".format(token),
            "Content-Type": "text/plain"
            if special_case == "invalid_content_type"
            else "application/json",
            "Content-Type": (
                "text/plain"
                if special_case == "invalid_content_type"
                else "application/json"
            ),
        },
    )

    actor_response = (
        await ds_write.client.get("/-/actor.json", headers=kwargs["headers"])
    ).json()
    print(actor_response)
    assert set((actor_response["actor"] or {}).get("_r", {}).get("a") or []) == set(
        token_permissions
    )

    if special_case == "invalid_json":
        del kwargs["json"]

@@ -310,6 +373,41 @@ async def test_insert_or_upsert_row_errors(
    assert before_count == after_count


@pytest.mark.asyncio
@pytest.mark.parametrize("allowed", (True, False))
async def test_upsert_permissions_per_table(ds_write, allowed):
    # https://github.com/simonw/datasette/issues/2262
    token = "dstok_{}".format(
        ds_write.sign(
            {
                "a": "root",
                "token": "dstok",
                "t": int(time.time()),
                "_r": {
                    "r": {
                        "data": {
                            "docs" if allowed else "other": ["ir", "ur"],
                        }
                    }
                },
            },
            namespace="token",
        )
    )
    response = await ds_write.client.post(
        "/data/docs/-/upsert",
        json={"rows": [{"id": 1, "title": "One"}]},
        headers={
            "Authorization": "Bearer {}".format(token),
        },
    )
    if allowed:
        assert response.status_code == 200
        assert response.json()["ok"] is True
    else:
        assert response.status_code == 403


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "ignore,replace,expected_rows",

@@ -351,6 +449,16 @@ async def test_insert_ignore_replace(
        headers=_headers(token),
    )
    assert response.status_code == 201

    # Analytics event
    event = last_event(ds_write)
    assert event.name == "insert-rows"
    assert event.num_rows == 1
    assert event.database == "data"
    assert event.table == "docs"
    assert event.ignore == ignore
    assert event.replace == replace

    actual_rows = [
        dict(r)
        for r in (

@@ -403,6 +511,12 @@ async def test_insert_ignore_replace(
                {"id": 1, "title": "Two", "score": 1},
            ],
        ),
        (
            # Upsert with an alter
            {"rows": [{"id": 1, "title": "One"}], "pk": "id"},
            {"rows": [{"id": 1, "title": "Two", "extra": "extra"}], "alter": True},
            [{"id": 1, "title": "Two", "extra": "extra"}],
        ),
    ),
)
@pytest.mark.parametrize("should_return", (False, True))

@@ -425,6 +539,18 @@ async def test_upsert(ds_write, initial, input, expected_rows, should_return):
    )
    assert response.status_code == 200
    assert response.json()["ok"] is True

    # Analytics event
    event = last_event(ds_write)
    assert event.database == "data"
    assert event.table == "upsert_test"
    if input.get("alter"):
        assert event.name == "alter-table"
        assert "extra" in event.after_schema
    else:
        assert event.name == "upsert-rows"
        assert event.num_rows == 1

    if should_return:
        # We only expect it to return rows corresponding to those we sent
        expected_returned_rows = expected_rows[: len(input["rows"])]

@@ -528,6 +654,13 @@ async def test_delete_row(ds_write, table, row_for_create, pks, delete_path):
        headers=_headers(write_token(ds_write)),
    )
    assert delete_response.status_code == 200

    # Analytics event
    event = last_event(ds_write)
    assert event.name == "delete-row"
    assert event.database == "data"
    assert event.table == table
    assert event.pks == str(delete_path).split(",")
    assert (
        await ds_write.client.get(
            "/data.json?_shape=arrayfirst&sql=select+count(*)+from+{}".format(table)

@@ -536,24 +669,33 @@ async def test_delete_row(ds_write, table, row_for_create, pks, delete_path):


@pytest.mark.asyncio
@pytest.mark.parametrize("scenario", ("no_token", "no_perm", "bad_table"))
@pytest.mark.parametrize(
    "scenario", ("no_token", "no_perm", "bad_table", "cannot_alter")
)
async def test_update_row_check_permission(ds_write, scenario):
    if scenario == "no_token":
        token = "bad_token"
    elif scenario == "no_perm":
        token = write_token(ds_write, actor_id="not-root")
    elif scenario == "cannot_alter":
        # update-row but no alter-table:
        token = write_token(ds_write, permissions=["ur"])
    else:
        token = write_token(ds_write)

    pk = await _insert_row(ds_write)

    path = "/data/{}/{}/-/delete".format(
    path = "/data/{}/{}/-/update".format(
        "docs" if scenario != "bad_table" else "bad_table", pk
    )

    json_body = {"update": {"title": "New title"}}
    if scenario == "cannot_alter":
        json_body["alter"] = True

    response = await ds_write.client.post(
        path,
        json={"update": {"title": "New title"}},
        json=json_body,
        headers=_headers(token),
    )
    assert response.status_code == 403 if scenario in ("no_token", "bad_token") else 404

@@ -565,6 +707,36 @@ async def test_update_row_check_permission(ds_write, scenario):
    )


@pytest.mark.asyncio
async def test_update_row_invalid_key(ds_write):
    token = write_token(ds_write)

    pk = await _insert_row(ds_write)

    path = "/data/docs/{}/-/update".format(pk)
    response = await ds_write.client.post(
        path,
        json={"update": {"title": "New title"}, "bad_key": 1},
        headers=_headers(token),
    )
    assert response.status_code == 400
    assert response.json() == {"ok": False, "errors": ["Invalid keys: bad_key"]}


@pytest.mark.asyncio
async def test_update_row_alter(ds_write):
    token = write_token(ds_write, permissions=["ur", "at"])
    pk = await _insert_row(ds_write)
    path = "/data/docs/{}/-/update".format(pk)
    response = await ds_write.client.post(
        path,
        json={"update": {"title": "New title", "extra": "extra"}, "alter": True},
        headers=_headers(token),
    )
    assert response.status_code == 200
    assert response.json() == {"ok": True}


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "input,expected_errors",

@@ -608,6 +780,13 @@ async def test_update_row(ds_write, input, expected_errors, use_return):
        for k, v in input.items():
            assert returned_row[k] == v

    # Analytics event
    event = last_event(ds_write)
    assert event.actor == {"id": "root", "token": "dstok"}
    assert event.database == "data"
    assert event.table == "docs"
    assert event.pks == [str(pk)]

    # And fetch the row to check it's updated
    response = await ds_write.client.get(
        "/data/docs/{}.json?_shape=array".format(pk),

@@ -674,18 +853,26 @@ async def test_drop_table(ds_write, scenario):
        headers=_headers(token),
    )
    assert response2.json() == {"ok": True}
    # Check event
    event = last_event(ds_write)
    assert event.name == "drop-table"
    assert event.actor == {"id": "root", "token": "dstok"}
    assert event.table == "docs"
    assert event.database == "data"
    # Table should 404
    assert (await ds_write.client.get("/data/docs")).status_code == 404


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "input,expected_status,expected_response",
    "input,expected_status,expected_response,expected_events",
    (
        # Permission error with a bad token
        (
            {"table": "bad", "row": {"id": 1}},
            403,
            {"ok": False, "errors": ["Permission denied"]},
            [],
        ),
        # Successful creation with columns:
        (

@@ -732,6 +919,7 @@ async def test_drop_table(ds_write, scenario):
                    ")"
                ),
            },
            ["create-table"],
        ),
        # Successful creation with rows:
        (

@@ -767,6 +955,7 @@ async def test_drop_table(ds_write, scenario):
                ),
                "row_count": 2,
            },
            ["create-table", "insert-rows"],
        ),
        # Successful creation with row:
        (

@@ -795,6 +984,7 @@ async def test_drop_table(ds_write, scenario):
                ),
                "row_count": 1,
            },
            ["create-table", "insert-rows"],
        ),
        # Create with row and no primary key
        (

@@ -814,6 +1004,7 @@ async def test_drop_table(ds_write, scenario):
                "schema": ("CREATE TABLE [four] (\n" "   [name] TEXT\n" ")"),
                "row_count": 1,
            },
            ["create-table", "insert-rows"],
        ),
        # Create table with compound primary key
        (

@@ -835,6 +1026,7 @@ async def test_drop_table(ds_write, scenario):
                ),
                "row_count": 1,
            },
            ["create-table", "insert-rows"],
        ),
        # Error: Table is required
        (

@@ -846,6 +1038,7 @@ async def test_drop_table(ds_write, scenario):
                "ok": False,
                "errors": ["Table is required"],
            },
            [],
        ),
        # Error: Invalid table name
        (

@@ -858,6 +1051,7 @@ async def test_drop_table(ds_write, scenario):
                "ok": False,
                "errors": ["Invalid table name"],
            },
            [],
        ),
        # Error: JSON must be an object
        (

@@ -867,6 +1061,7 @@ async def test_drop_table(ds_write, scenario):
                "ok": False,
                "errors": ["JSON must be an object"],
            },
            [],
        ),
        # Error: Cannot specify columns with rows or row
        (

@@ -880,6 +1075,7 @@ async def test_drop_table(ds_write, scenario):
                "ok": False,
                "errors": ["Cannot specify columns with rows or row"],
            },
            [],
        ),
        # Error: columns, rows or row is required
        (

@@ -891,6 +1087,7 @@ async def test_drop_table(ds_write, scenario):
                "ok": False,
                "errors": ["columns, rows or row is required"],
            },
            [],
        ),
        # Error: columns must be a list
        (

@@ -903,6 +1100,7 @@ async def test_drop_table(ds_write, scenario):
                "ok": False,
                "errors": ["columns must be a list"],
            },
            [],
        ),
        # Error: columns must be a list of objects
        (

@@ -915,6 +1113,7 @@ async def test_drop_table(ds_write, scenario):
                "ok": False,
                "errors": ["columns must be a list of objects"],
            },
            [],
        ),
        # Error: Column name is required
        (

@@ -927,6 +1126,7 @@ async def test_drop_table(ds_write, scenario):
                "ok": False,
                "errors": ["Column name is required"],
            },
            [],
        ),
        # Error: Unsupported column type
        (

@@ -939,6 +1139,7 @@ async def test_drop_table(ds_write, scenario):
                "ok": False,
                "errors": ["Unsupported column type: bad"],
            },
            [],
        ),
        # Error: Duplicate column name
        (

@@ -954,6 +1155,7 @@ async def test_drop_table(ds_write, scenario):
                "ok": False,
                "errors": ["Duplicate column name: id"],
            },
            [],
        ),
        # Error: rows must be a list
        (

@@ -966,6 +1168,7 @@ async def test_drop_table(ds_write, scenario):
                "ok": False,
                "errors": ["rows must be a list"],
            },
            [],
        ),
        # Error: rows must be a list of objects
        (

@@ -978,6 +1181,7 @@ async def test_drop_table(ds_write, scenario):
                "ok": False,
                "errors": ["rows must be a list of objects"],
            },
            [],
        ),
        # Error: pk must be a string
        (

@@ -991,6 +1195,7 @@ async def test_drop_table(ds_write, scenario):
                "ok": False,
                "errors": ["pk must be a string"],
            },
            [],
        ),
        # Error: Cannot specify both pk and pks
        (

@@ -1005,6 +1210,7 @@ async def test_drop_table(ds_write, scenario):
                "ok": False,
                "errors": ["Cannot specify both pk and pks"],
            },
            [],
        ),
        # Error: pks must be a list
        (

@@ -1018,12 +1224,14 @@ async def test_drop_table(ds_write, scenario):
                "ok": False,
                "errors": ["pks must be a list"],
            },
            [],
        ),
        # Error: pks must be a list of strings
        (
            {"table": "bad", "row": {"id": 1, "name": "Row 1"}, "pks": [1, 2]},
            400,
            {"ok": False, "errors": ["pks must be a list of strings"]},
            [],
        ),
        # Error: ignore and replace are mutually exclusive
        (

@@ -1039,6 +1247,7 @@ async def test_drop_table(ds_write, scenario):
                "ok": False,
                "errors": ["ignore and replace are mutually exclusive"],
            },
            [],
        ),
        # ignore and replace require row or rows
        (

@@ -1052,6 +1261,7 @@ async def test_drop_table(ds_write, scenario):
                "ok": False,
                "errors": ["ignore and replace require row or rows"],
            },
            [],
        ),
        # ignore and replace require pk or pks
        (

@@ -1065,6 +1275,7 @@ async def test_drop_table(ds_write, scenario):
                "ok": False,
                "errors": ["ignore and replace require pk or pks"],
            },
            [],
        ),
        (
            {

@@ -1077,10 +1288,14 @@ async def test_drop_table(ds_write, scenario):
                "ok": False,
                "errors": ["ignore and replace require pk or pks"],
            },
            [],
        ),
    ),
)
async def test_create_table(ds_write, input, expected_status, expected_response):
async def test_create_table(
    ds_write, input, expected_status, expected_response, expected_events
):
    ds_write._tracked_events = []
    # Special case for expected status of 403
    if expected_status == 403:
        token = "bad_token"

@@ -1094,6 +1309,9 @@ async def test_create_table(ds_write, input, expected_status, expected_response):
    assert response.status_code == expected_status
    data = response.json()
    assert data == expected_response
    # Should have tracked the expected events
    events = ds_write._tracked_events
    assert [e.name for e in events] == expected_events


@pytest.mark.asyncio

@@ -1106,7 +1324,7 @@ async def test_create_table(ds_write, input, expected_status, expected_response):
            ["create-table"],
            {"table": "t", "rows": [{"name": "c"}]},
            403,
            ["Permission denied - need insert-row"],
            ["Permission denied: need insert-row"],
        ),
        # This should work:
        (

@@ -1120,7 +1338,7 @@ async def test_create_table(ds_write, input, expected_status, expected_response):
            ["create-table", "insert-row"],
            {"table": "t", "rows": [{"id": 1}], "pk": "id", "replace": True},
            403,
            ["Permission denied - need update-row"],
            ["Permission denied: need update-row"],
        ),
    ),
)

@@ -1192,6 +1410,8 @@ async def test_create_table_ignore_replace(ds_write, input, expected_rows_after):
    )
    assert first_response.status_code == 201

    ds_write._tracked_events = []

    # Try a second time
    second_response = await ds_write.client.post(
        "/data/-/create",

@@ -1203,6 +1423,10 @@ async def test_create_table_ignore_replace(ds_write, input, expected_rows_after):
    rows = await ds_write.client.get("/data/test_insert_replace.json?_shape=array")
    assert rows.json() == expected_rows_after

    # Check it fired the right events
    event_names = [e.name for e in ds_write._tracked_events]
    assert event_names == ["insert-rows"]


@pytest.mark.asyncio
async def test_create_table_error_if_pk_changed(ds_write):

@@ -1283,3 +1507,88 @@ async def test_method_not_allowed(ds_write, path):
        "ok": False,
        "error": "Method not allowed",
    }


@pytest.mark.asyncio
async def test_create_uses_alter_by_default_for_new_table(ds_write):
    ds_write._tracked_events = []
    token = write_token(ds_write)
    response = await ds_write.client.post(
        "/data/-/create",
        json={
            "table": "new_table",
            "rows": [
                {
                    "name": "Row 1",
                }
            ]
            * 100
            + [
                {"name": "Row 2", "extra": "Extra"},
            ],
            "pk": "id",
        },
        headers=_headers(token),
    )
    assert response.status_code == 201
    event_names = [e.name for e in ds_write._tracked_events]
    assert event_names == ["create-table", "insert-rows"]


@pytest.mark.asyncio
@pytest.mark.parametrize("has_alter_permission", (True, False))
async def test_create_using_alter_against_existing_table(
    ds_write, has_alter_permission
):
    token = write_token(
        ds_write, permissions=["ir", "ct"] + (["at"] if has_alter_permission else [])
    )
    # First create the table
    response = await ds_write.client.post(
        "/data/-/create",
        json={
            "table": "new_table",
            "rows": [
                {
                    "name": "Row 1",
                }
            ],
            "pk": "id",
        },
        headers=_headers(token),
    )
    assert response.status_code == 201

    ds_write._tracked_events = []
    # Now try to insert more rows using /-/create with alter=True
    response2 = await ds_write.client.post(
        "/data/-/create",
        json={
            "table": "new_table",
            "rows": [{"name": "Row 2", "extra": "extra"}],
            "pk": "id",
            "alter": True,
        },
        headers=_headers(token),
    )
    if not has_alter_permission:
        assert response2.status_code == 403
        assert response2.json() == {
            "ok": False,
            "errors": ["Permission denied: need alter-table"],
        }
    else:
        assert response2.status_code == 201

        event_names = [e.name for e in ds_write._tracked_events]
        assert event_names == ["alter-table", "insert-rows"]

        # It should have altered the table
        alter_event = ds_write._tracked_events[0]
        assert alter_event.name == "alter-table"
        assert "extra" not in alter_event.before_schema
        assert "extra" in alter_event.after_schema

        insert_rows_event = ds_write._tracked_events[1]
        assert insert_rows_event.name == "insert-rows"
        assert insert_rows_event.num_rows == 1

@@ -1,6 +1,6 @@
from bs4 import BeautifulSoup as Soup
from .fixtures import app_client
from .utils import cookie_was_deleted
from .utils import cookie_was_deleted, last_event
from click.testing import CliRunner
from datasette.utils import baseconv
from datasette.cli import cli

@@ -19,6 +19,10 @@ async def test_auth_token(ds_client):
    assert {"a": {"id": "root"}} == ds_client.ds.unsign(
        response.cookies["ds_actor"], "actor"
    )
    # Should have recorded a login event
    event = last_event(ds_client.ds)
    assert event.name == "login"
    assert event.actor == {"id": "root"}
    # Check that a second with same token fails
    assert ds_client.ds._root_token is None
    assert (await ds_client.get(path)).status_code == 403

@@ -57,7 +61,7 @@ async def test_actor_cookie_that_expires(ds_client, offset, expected):
    cookie = ds_client.ds.sign(
        {"a": {"id": "test"}, "e": baseconv.base62.encode(expires_at)}, "actor"
    )
    response = await ds_client.get("/", cookies={"ds_actor": cookie})
    await ds_client.get("/", cookies={"ds_actor": cookie})
    assert ds_client.ds._last_request.scope["actor"] == expected


@@ -86,6 +90,10 @@ def test_logout(app_client):
        csrftoken_from=True,
        cookies={"ds_actor": app_client.actor_cookie({"id": "test"})},
    )
    # Should have recorded a logout event
    event = last_event(app_client.ds)
    assert event.name == "logout"
    assert event.actor == {"id": "test"}
    # The ds_actor cookie should have been unset
    assert cookie_was_deleted(response4, "ds_actor")
    # Should also have set a message

@@ -102,7 +110,7 @@ async def test_logout_button_in_navigation(ds_client, path):
    anon_response = await ds_client.get(path)
    for fragment in (
        "<strong>test</strong>",
        '<form action="/-/logout" method="post">',
        '<form class="nav-menu-logout" action="/-/logout" method="post">',
    ):
        assert fragment in response.text
        assert fragment not in anon_response.text

@@ -113,7 +121,10 @@ async def test_logout_button_in_navigation(ds_client, path):
async def test_no_logout_button_in_navigation_if_no_ds_actor_cookie(ds_client, path):
    response = await ds_client.get(path + "?_bot=1")
    assert "<strong>bot</strong>" in response.text
    assert '<form action="/-/logout" method="post">' not in response.text
    assert (
        '<form class="nav-menu-logout" action="/-/logout" method="post">'
        not in response.text
    )


@pytest.mark.parametrize(

@@ -185,6 +196,13 @@ def test_auth_create_token(
        for error in errors:
            assert '<p class="message-error">{}</p>'.format(error) in response2.text
    else:
        # Check create-token event
        event = last_event(app_client.ds)
        assert event.name == "create-token"
        assert event.expires_after == expected_duration
        assert isinstance(event.restrict_all, list)
        assert isinstance(event.restrict_database, dict)
        assert isinstance(event.restrict_resource, dict)
        # Extract token from page
        token = response2.text.split('value="dstok_')[1].split('"')[0]
        details = app_client.ds.unsign(token, "token")

@@ -1,8 +1,6 @@
import black
from click.testing import CliRunner
from pathlib import Path
import pytest
import sys

code_root = Path(__file__).parent.parent

@@ -19,7 +19,7 @@ def canned_write_client(tmpdir):
    with make_app_client(
        extra_databases={"data.db": "create table names (name text)"},
        template_dir=str(template_dir),
        metadata={
        config={
            "databases": {
                "data": {
                    "queries": {

@@ -63,7 +63,7 @@ def canned_write_client(tmpdir):
def canned_write_immutable_client():
    with make_app_client(
        is_immutable=True,
        metadata={
        config={
            "databases": {
                "fixtures": {
                    "queries": {

@@ -172,7 +172,7 @@ def test_insert_error(canned_write_client):
    )
    assert [["UNIQUE constraint failed: names.rowid", 3]] == messages
    # How about with a custom error message?
    canned_write_client.ds._metadata["databases"]["data"]["queries"][
    canned_write_client.ds.config["databases"]["data"]["queries"][
        "add_name_specify_id"
    ]["on_error_message"] = "ERROR"
    response = canned_write_client.post(

@@ -316,7 +316,7 @@ def test_canned_query_permissions(canned_write_client):
def magic_parameters_client():
    with make_app_client(
        extra_databases={"data.db": "create table logs (line text)"},
        metadata={
        config={
            "databases": {
                "data": {
                    "queries": {

@@ -345,10 +345,10 @@ def magic_parameters_client():
    ],
)
def test_magic_parameters(magic_parameters_client, magic_parameter, expected_re):
    magic_parameters_client.ds._metadata["databases"]["data"]["queries"]["runme_post"][
    magic_parameters_client.ds.config["databases"]["data"]["queries"]["runme_post"][
        "sql"
    ] = f"insert into logs (line) values (:{magic_parameter})"
    magic_parameters_client.ds._metadata["databases"]["data"]["queries"]["runme_get"][
    magic_parameters_client.ds.config["databases"]["data"]["queries"]["runme_get"][
        "sql"
    ] = f"select :{magic_parameter} as result"
    cookies = {

@@ -384,7 +384,7 @@ def test_magic_parameters(magic_parameters_client, magic_parameter, expected_re):
@pytest.mark.parametrize("use_csrf", [True, False])
@pytest.mark.parametrize("return_json", [True, False])
def test_magic_parameters_csrf_json(magic_parameters_client, use_csrf, return_json):
    magic_parameters_client.ds._metadata["databases"]["data"]["queries"]["runme_post"][
    magic_parameters_client.ds.config["databases"]["data"]["queries"]["runme_post"][
        "sql"
    ] = "insert into logs (line) values (:_header_host)"
    qs = ""

@@ -4,7 +4,6 @@ from .fixtures import (
    TestClient as _TestClient,
    EXPECTED_PLUGINS,
)
import asyncio
from datasette.app import SETTINGS
from datasette.plugins import DEFAULT_PLUGINS
from datasette.cli import cli, serve

@@ -19,7 +18,6 @@ import pytest
import sys
import textwrap
from unittest import mock
import urllib


def test_inspect_cli(app_client):

@@ -100,7 +98,11 @@ def test_spatialite_error_if_cannot_find_load_extension_spatialite():
def test_plugins_cli(app_client):
    runner = CliRunner()
    result1 = runner.invoke(cli, ["plugins"])
    assert json.loads(result1.output) == EXPECTED_PLUGINS
    actual_plugins = sorted(
        [p for p in json.loads(result1.output) if p["name"] != "TrackEventPlugin"],
        key=lambda p: p["name"],
    )
    assert actual_plugins == EXPECTED_PLUGINS
    # Try with --all
    result2 = runner.invoke(cli, ["plugins", "--all"])
    names = [p["name"] for p in json.loads(result2.output)]

@@ -142,6 +144,7 @@ def test_metadata_yaml():
        secret=None,
        root=False,
        token=None,
        actor=None,
        version_note=None,
        get=None,
        help_settings=False,

@@ -153,6 +156,7 @@ def test_metadata_yaml():
        ssl_keyfile=None,
        ssl_certfile=None,
        return_instance=True,
        internal=None,
    )
    client = _TestClient(ds)
    response = client.get("/-/metadata.json")

@@ -220,20 +224,65 @@ def test_serve_invalid_ports(invalid_port):
    assert "Invalid value for '-p'" in result.stderr


def test_setting():
@pytest.mark.parametrize(
    "args",
    (
        ["--setting", "default_page_size", "5"],
        ["--setting", "settings.default_page_size", "5"],
        ["-s", "settings.default_page_size", "5"],
    ),
)
def test_setting(args):
    runner = CliRunner()
    result = runner.invoke(cli, ["--get", "/-/settings.json"] + args)
    assert result.exit_code == 0, result.output
    settings = json.loads(result.output)
    assert settings["default_page_size"] == 5


def test_plugin_s_overwrite():
    runner = CliRunner()
    plugins_dir = str(pathlib.Path(__file__).parent / "plugins")

    result = runner.invoke(
        cli, ["--setting", "default_page_size", "5", "--get", "/-/settings.json"]
        cli,
        [
            "--plugins-dir",
            plugins_dir,
            "--get",
            "/_memory.json?sql=select+prepare_connection_args()",
        ],
    )
    assert result.exit_code == 0, result.output
    assert json.loads(result.output)["default_page_size"] == 5
    assert (
        json.loads(result.output).get("rows")[0].get("prepare_connection_args()")
        == 'database=_memory, datasette.plugin_config("name-of-plugin")=None'
    )

    result = runner.invoke(
        cli,
        [
            "--plugins-dir",
            plugins_dir,
            "--get",
            "/_memory.json?sql=select+prepare_connection_args()",
            "-s",
            "plugins.name-of-plugin",
            "OVERRIDE",
        ],
    )
    assert result.exit_code == 0, result.output
    assert (
        json.loads(result.output).get("rows")[0].get("prepare_connection_args()")
        == 'database=_memory, datasette.plugin_config("name-of-plugin")=OVERRIDE'
    )


def test_setting_type_validation():
    runner = CliRunner(mix_stderr=False)
    result = runner.invoke(cli, ["--setting", "default_page_size", "dog"])
    assert result.exit_code == 2
    assert '"default_page_size" should be an integer' in result.stderr
    assert '"settings.default_page_size" should be an integer' in result.stderr


@pytest.mark.parametrize("default_allow_sql", (True, False))

@@ -258,17 +307,6 @@ def test_setting_default_allow_sql(default_allow_sql):
    assert "Forbidden" in result.output


def test_config_deprecated():
    # The --config option should show a deprecation message
    runner = CliRunner(mix_stderr=False)
    result = runner.invoke(
        cli, ["--config", "allow_download:off", "--get", "/-/settings.json"]
    )
    assert result.exit_code == 0
    assert not json.loads(result.output)["allow_download"]
    assert "will be deprecated in" in result.stderr


def test_sql_errors_logged_to_stderr():
    runner = CliRunner(mix_stderr=False)
    result = runner.invoke(cli, ["--get", "/_memory.json?sql=select+blah"])

@@ -294,6 +332,32 @@ def test_serve_create(tmpdir):
    assert db_path.exists()


@pytest.mark.parametrize("argument", ("-c", "--config"))
@pytest.mark.parametrize("format_", ("json", "yaml"))
def test_serve_config(tmpdir, argument, format_):
    config_path = tmpdir / "datasette.{}".format(format_)
    config_path.write_text(
        (
            "settings:\n default_page_size: 5\n"
            if format_ == "yaml"
            else '{"settings": {"default_page_size": 5}}'
        ),
        "utf-8",
    )
    runner = CliRunner()
    result = runner.invoke(
        cli,
        [
            argument,
            str(config_path),
            "--get",
            "/-/settings.json",
        ],
    )
    assert result.exit_code == 0, result.output
    assert json.loads(result.output)["default_page_size"] == 5


def test_serve_duplicate_database_names(tmpdir):
    "'datasette db.db nested/db.db' should attach two databases, /db and /db_2"
    runner = CliRunner()

@@ -349,9 +413,12 @@ def test_help_settings():
    assert setting.name in result.output


@pytest.mark.parametrize("setting", ("hash_urls", "default_cache_ttl_hashed"))
def test_help_error_on_hash_urls_setting(setting):
def test_internal_db(tmpdir):
    runner = CliRunner()
    result = runner.invoke(cli, ["--setting", setting, 1])
    assert result.exit_code == 2
    assert "The hash_urls setting has been removed" in result.output
    internal_path = tmpdir / "internal.db"
    assert not internal_path.exists()
    result = runner.invoke(
        cli, ["--memory", "--internal", str(internal_path), "--get", "/"]
    )
    assert result.exit_code == 0
    assert internal_path.exists()

@@ -1,4 +1,4 @@
from datasette.cli import cli, serve
from datasette.cli import cli
from datasette.plugins import pm
from click.testing import CliRunner
import textwrap

@@ -80,7 +80,7 @@ def test_serve_with_get_and_token():
    assert json.loads(result2.output) == {"actor": {"id": "root", "token": "dstok"}}


def test_serve_with_get_exit_code_for_error(tmp_path_factory):
def test_serve_with_get_exit_code_for_error():
    runner = CliRunner()
    result = runner.invoke(
        cli,

@@ -94,3 +94,26 @@ def test_serve_with_get_exit_code_for_error():
    )
    assert result.exit_code == 1
    assert "404" in result.output


def test_serve_get_actor():
    runner = CliRunner()
    result = runner.invoke(
        cli,
        [
            "serve",
            "--memory",
            "--get",
            "/-/actor.json",
            "--actor",
            '{"id": "root", "extra": "x"}',
        ],
        catch_exceptions=False,
    )
    assert result.exit_code == 0
    assert json.loads(result.output) == {
        "actor": {
            "id": "root",
            "extra": "x",
        }
    }

@@ -3,11 +3,9 @@ import pathlib
import pytest

from datasette.app import Datasette
from datasette.cli import cli
from datasette.utils.sqlite import sqlite3
from datasette.utils import StartupError
from .fixtures import TestClient as _TestClient
from click.testing import CliRunner

PLUGIN = """
from datasette import hookimpl

@@ -19,8 +17,10 @@ def extra_template_vars():
    }
"""
METADATA = {"title": "This is from metadata"}
SETTINGS = {
    "default_cache_ttl": 60,
CONFIG = {
    "settings": {
        "default_cache_ttl": 60,
    }
}
CSS = """
body { margin-top: 3em}

@@ -47,7 +47,7 @@ def config_dir(tmp_path_factory):
    (static_dir / "hello.css").write_text(CSS, "utf-8")

    (config_dir / "metadata.json").write_text(json.dumps(METADATA), "utf-8")
    (config_dir / "settings.json").write_text(json.dumps(SETTINGS), "utf-8")
    (config_dir / "datasette.json").write_text(json.dumps(CONFIG), "utf-8")

    for dbname in ("demo.db", "immutable.db", "j.sqlite3", "k.sqlite"):
        db = sqlite3.connect(str(config_dir / dbname))

@@ -81,16 +81,16 @@ def config_dir(tmp_path_factory):


def test_invalid_settings(config_dir):
    previous = (config_dir / "settings.json").read_text("utf-8")
    (config_dir / "settings.json").write_text(
        json.dumps({"invalid": "invalid-setting"}), "utf-8"
    previous = (config_dir / "datasette.json").read_text("utf-8")
    (config_dir / "datasette.json").write_text(
        json.dumps({"settings": {"invalid": "invalid-setting"}}), "utf-8"
    )
    try:
        with pytest.raises(StartupError) as ex:
            ds = Datasette([], config_dir=config_dir)
        assert ex.value.args[0] == "Invalid setting 'invalid' in settings.json"
        assert ex.value.args[0] == "Invalid setting 'invalid' in datasette.json"
    finally:
        (config_dir / "settings.json").write_text(previous, "utf-8")
        (config_dir / "datasette.json").write_text(previous, "utf-8")


@pytest.fixture(scope="session")

@@ -111,15 +111,6 @@ def test_settings(config_dir_client):
    assert 60 == response.json["default_cache_ttl"]


def test_error_on_config_json(tmp_path_factory):
    config_dir = tmp_path_factory.mktemp("config-dir")
    (config_dir / "config.json").write_text(json.dumps(SETTINGS), "utf-8")
    runner = CliRunner(mix_stderr=False)
    result = runner.invoke(cli, [str(config_dir), "--get", "/-/settings.json"])
    assert result.exit_code == 1
    assert "config.json should be renamed to settings.json" in result.stderr


def test_plugins(config_dir_client):
    response = config_dir_client.get("/-/plugins.json")
    assert 200 == response.status

@@ -1,3 +1,4 @@
from datasette.app import Datasette
from bs4 import BeautifulSoup as Soup
import pytest
from .fixtures import (  # noqa

@@ -95,6 +96,40 @@ async def test_table_csv_with_nullable_labels(ds_client):
    assert response.text == EXPECTED_TABLE_WITH_NULLABLE_LABELS_CSV


@pytest.mark.asyncio
async def test_table_csv_with_invalid_labels():
    # https://github.com/simonw/datasette/issues/2214
    ds = Datasette()
    await ds.invoke_startup()
    db = ds.add_memory_database("db_2214")
    await db.execute_write_script(
        """
    create table t1 (id integer primary key, name text);
    insert into t1 (id, name) values (1, 'one');
    insert into t1 (id, name) values (2, 'two');
    create table t2 (textid text primary key, name text);
    insert into t2 (textid, name) values ('a', 'alpha');
    insert into t2 (textid, name) values ('b', 'beta');
    create table if not exists maintable (
        id integer primary key,
        fk_integer integer references t1(id),
        fk_text text references t2(textid)
    );
    insert into maintable (id, fk_integer, fk_text) values (1, 1, 'a');
    insert into maintable (id, fk_integer, fk_text) values (2, 3, 'b'); -- invalid fk_integer
    insert into maintable (id, fk_integer, fk_text) values (3, 2, 'c'); -- invalid fk_text
    """
    )
    response = await ds.client.get("/db_2214/maintable.csv?_labels=1")
    assert response.status_code == 200
    assert response.text == (
        "id,fk_integer,fk_integer_label,fk_text,fk_text_label\r\n"
        "1,1,one,a,alpha\r\n"
        "2,3,,b,beta\r\n"
        "3,2,two,c,\r\n"
    )


@pytest.mark.asyncio
async def test_table_csv_blob_columns(ds_client):
    response = await ds_client.get("/fixtures/binary_data.csv")

@@ -1,9 +1,9 @@
 """
 Tests to ensure certain things are documented.
 """
 from click.testing import CliRunner
+
 from datasette import app, utils
 from datasette.cli import cli
+from datasette.app import Datasette
 from datasette.filters import Filters
 from pathlib import Path
 import pytest

@@ -42,7 +42,9 @@ def plugin_hooks_content():
     "plugin", [name for name in dir(app.pm.hook) if not name.startswith("_")]
 )
 def test_plugin_hooks_are_documented(plugin, plugin_hooks_content):
-    headings = get_headings(plugin_hooks_content, "-")
+    headings = set()
+    headings.update(get_headings(plugin_hooks_content, "-"))
+    headings.update(get_headings(plugin_hooks_content, "~"))
     assert plugin in headings
     hook_caller = getattr(app.pm.hook, plugin)
     arg_names = [a for a in hook_caller.spec.argnames if a != "__multicall__"]

@@ -102,3 +104,35 @@ def documented_fns():
 @pytest.mark.parametrize("fn", utils.functions_marked_as_documented)
 def test_functions_marked_with_documented_are_documented(documented_fns, fn):
     assert fn.__name__ in documented_fns
+
+
+# Tests for testing_plugins.rst documentation
+
+# fmt: off
+# -- start test_homepage --
+@pytest.mark.asyncio
+async def test_homepage():
+    ds = Datasette(memory=True)
+    response = await ds.client.get("/")
+    html = response.text
+    assert "<h1>" in html
+# -- end test_homepage --
+
+
+# -- start test_actor_is_null --
+@pytest.mark.asyncio
+async def test_actor_is_null():
+    ds = Datasette(memory=True)
+    response = await ds.client.get("/-/actor.json")
+    assert response.json() == {"actor": None}
+# -- end test_actor_is_null --
+
+
+# -- start test_signed_cookie_actor --
+@pytest.mark.asyncio
+async def test_signed_cookie_actor():
+    ds = Datasette(memory=True)
+    cookies = {"ds_actor": ds.client.actor_cookie({"id": "root"})}
+    response = await ds.client.get("/-/actor.json", cookies=cookies)
+    assert response.json() == {"actor": {"id": "root"}}
+# -- end test_signed_cookie_actor --
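Two things are happening in this file: test_plugin_hooks_are_documented now accepts headings underlined with either "-" or "~" (so plugin hooks documented under sub-headings still count), and the # -- start ... -- / # -- end ... -- comment markers bound the exact snippets that testing_plugins.rst pulls in. For orientation, here is a hedged sketch of what a get_headings() helper of this shape can look like; the real implementation in this file may differ in detail.

import re


def get_headings(content, underline="-"):
    # Find reST section titles: a word (optionally followed by "()")
    # sitting on top of a line of repeated underline characters.
    heading_re = re.compile(r"(\w+)(\(\))?\n\{}+\n".format(underline))
    return {match[0] for match in heading_re.findall(content)}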
@@ -0,0 +1,34 @@
+# fmt: off
+# -- start datasette_with_plugin_fixture --
+from datasette import hookimpl
+from datasette.app import Datasette
+from datasette.plugins import pm
+import pytest
+import pytest_asyncio
+
+
+@pytest_asyncio.fixture
+async def datasette_with_plugin():
+    class TestPlugin:
+        __name__ = "TestPlugin"
+
+        @hookimpl
+        def register_routes(self):
+            return [
+                (r"^/error$", lambda: 1 / 0),
+            ]
+
+    pm.register(TestPlugin(), name="undo")
+    try:
+        yield Datasette()
+    finally:
+        pm.unregister(name="undo")
+# -- end datasette_with_plugin_fixture --
+
+
+# -- start datasette_with_plugin_test --
+@pytest.mark.asyncio
+async def test_error(datasette_with_plugin):
+    response = await datasette_with_plugin.client.get("/error")
+    assert response.status_code == 500
+# -- end datasette_with_plugin_test --
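This new file is the fixture example that testing_plugins.rst embeds: register a throwaway plugin with pm.register(..., name="undo") and always pm.unregister() in finally, so the plugin exists only for tests that request the fixture. The same shape works for exercising any hook; a sketch follows with an invented HelloPlugin and /hello route (Response, hookimpl and register_routes are real Datasette APIs, the rest is ours).

from datasette import hookimpl, Response
from datasette.app import Datasette
from datasette.plugins import pm
import pytest


@pytest.mark.asyncio
async def test_hello_route():
    class HelloPlugin:
        __name__ = "HelloPlugin"

        @hookimpl
        def register_routes(self):
            return [(r"^/hello$", lambda: Response.text("hello"))]

    pm.register(HelloPlugin(), name="undo-hello")
    try:
        response = await Datasette().client.get("/hello")
        assert response.text == "hello"
    finally:
        pm.unregister(name="undo-hello")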
@@ -82,7 +82,7 @@ async def test_column_facet_suggest_skip_if_enabled_by_metadata(ds_client):
         database="fixtures",
         sql="select * from facetable",
         table="facetable",
-        metadata={"facets": ["_city_id"]},
+        table_config={"facets": ["_city_id"]},
     )
     suggestions = [s["name"] for s in await facet.suggest()]
     assert [

@@ -278,7 +278,7 @@ async def test_column_facet_from_metadata_cannot_be_hidden(ds_client):
         database="fixtures",
         sql="select * from facetable",
         table="facetable",
-        metadata={"facets": ["_city_id"]},
+        table_config={"facets": ["_city_id"]},
     )
     buckets, timed_out = await facet.facet_results()
     assert [] == timed_out

@@ -643,3 +643,23 @@ async def test_conflicting_facet_names_json(ds_client):
         "created_2",
         "tags_2",
     }
+
+
+@pytest.mark.asyncio
+async def test_facet_against_in_memory_database():
+    ds = Datasette()
+    db = ds.add_memory_database("mem")
+    await db.execute_write(
+        "create table t (id integer primary key, name text, name2 text)"
+    )
+    to_insert = [{"name": "one", "name2": "1"} for _ in range(800)] + [
+        {"name": "two", "name2": "2"} for _ in range(300)
+    ]
+    print(to_insert)
+    await db.execute_write_many(
+        "insert into t (name, name2) values (:name, :name2)", to_insert
+    )
+    response1 = await ds.client.get("/mem/t")
+    assert response1.status_code == 200
+    response2 = await ds.client.get("/mem/t?_facet=name&_facet=name2")
+    assert response2.status_code == 200
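test_facet_against_in_memory_database above leans on execute_write_many(), which works like sqlite3's executemany: one parameterized statement plus an iterable of parameter dicts, executed on the write connection. A self-contained sketch (bulk_demo and the row contents are invented):

import asyncio
from datasette.app import Datasette


async def fill():
    ds = Datasette()
    db = ds.add_memory_database("bulk_demo")  # hypothetical name
    await db.execute_write("create table t (id integer primary key, name text)")
    rows = [{"name": "row-{}".format(i)} for i in range(1000)]
    await db.execute_write_many("insert into t (name) values (:name)", rows)
    count = (await db.execute("select count(*) from t")).single_value()
    assert count == 1000


asyncio.run(fill())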
@@ -7,6 +7,11 @@ import pytest
     "args,expected_where,expected_params",
     [
         ((("name_english__contains", "foo"),), ['"name_english" like :p0'], ["%foo%"]),
+        (
+            (("name_english__notcontains", "foo"),),
+            ['"name_english" not like :p0'],
+            ["%foo%"],
+        ),
         (
             (("foo", "bar"), ("bar__contains", "baz")),
             ['"bar" like :p0', '"foo" = :p1'],
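The new case covers the __notcontains filter operator, the negation of __contains: it compiles to "column" not like :p0 with a %value% parameter. A hedged sketch of driving Filters directly, as this parametrized test does; the exact constructor and return shapes are assumptions inferred from the assertions above, so treat this as illustrative only.

from datasette.filters import Filters

# Assumed API: Filters takes (key, value) pairs, build_where_clauses
# returns (list of SQL fragments, dict of named parameters).
filters = Filters([("name_english__notcontains", "foo")])
where_clauses, params = filters.build_where_clauses("mytable")
# where_clauses should be ['"name_english" not like :p0']
# params should map "p0" to "%foo%"
print(where_clauses, params)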
@@ -9,6 +9,7 @@ from .fixtures import ( # noqa
     METADATA,
 )
 from .utils import assert_footer_links, inner_html
+import copy
 import json
 import pathlib
 import pytest

@@ -518,7 +519,7 @@ def test_allow_download_off():


 def test_allow_sql_off():
-    with make_app_client(metadata={"allow_sql": {}}) as client:
+    with make_app_client(config={"allow_sql": {}}) as client:
         response = client.get("/fixtures")
         soup = Soup(response.content, "html.parser")
         assert not len(soup.findAll("textarea", {"name": "sql"}))

@@ -655,7 +656,7 @@ def test_canned_query_show_hide_metadata_option(
     expected_show_hide_text,
 ):
     with make_app_client(
-        metadata={
+        config={
             "databases": {
                 "_memory": {
                     "queries": {

@@ -752,7 +753,7 @@ async def test_metadata_json_html(ds_client):
     response = await ds_client.get("/-/metadata")
     assert response.status_code == 200
     pre = Soup(response.content, "html.parser").find("pre")
-    assert METADATA == json.loads(pre.text)
+    assert ds_client.ds.metadata() == json.loads(pre.text)


 @pytest.mark.asyncio

@@ -908,7 +909,7 @@ async def test_edit_sql_link_on_canned_queries(ds_client, path, expected):
 @pytest.mark.parametrize("permission_allowed", [True, False])
 def test_edit_sql_link_not_shown_if_user_lacks_permission(permission_allowed):
     with make_app_client(
-        metadata={
+        config={
             "allow_sql": None if permission_allowed else {"id": "not-you"},
             "databases": {"fixtures": {"queries": {"simple": "select 1 + 1"}}},
         }

@@ -1057,7 +1058,7 @@ async def test_redirect_percent_encoding_to_tilde_encoding(ds_client, path, expe

 @pytest.mark.asyncio
 @pytest.mark.parametrize(
-    "path,metadata,expected_links",
+    "path,config,expected_links",
     (
         ("/fixtures", {}, [("/", "home")]),
         ("/fixtures", {"allow": False, "databases": {"fixtures": {"allow": True}}}, []),

@@ -1080,21 +1081,23 @@ async def test_redirect_percent_encoding_to_tilde_encoding(ds_client, path, expe
             {"allow": False, "databases": {"fixtures": {"allow": True}}},
             [("/fixtures", "fixtures"), ("/fixtures/facetable", "facetable")],
         ),
-        (
-            "/fixtures/facetable/1",
-            {
-                "allow": False,
-                "databases": {"fixtures": {"tables": {"facetable": {"allow": True}}}},
-            },
-            [("/fixtures/facetable", "facetable")],
-        ),
+        # TODO: what
+        # (
+        #     "/fixtures/facetable/1",
+        #     {
+        #         "allow": False,
+        #         "databases": {"fixtures": {"tables": {"facetable": {"allow": True}}}},
+        #     },
+        #     [("/fixtures/facetable", "facetable")],
+        # ),
     ),
 )
-async def test_breadcrumbs_respect_permissions(
-    ds_client, path, metadata, expected_links
-):
-    orig = ds_client.ds._metadata_local
-    ds_client.ds._metadata_local = metadata
+async def test_breadcrumbs_respect_permissions(ds_client, path, config, expected_links):
+    previous_config = ds_client.ds.config
+    updated_config = copy.deepcopy(previous_config)
+    updated_config.update(config)
+    ds_client.ds.config = updated_config
+
     try:
         response = await ds_client.ds.client.get(path)
         soup = Soup(response.text, "html.parser")

@@ -1102,4 +1105,29 @@ async def test_breadcrumbs_respect_permissions(
         actual = [(a["href"], a.text) for a in breadcrumbs]
         assert actual == expected_links
     finally:
-        ds_client.ds._metadata_local = orig
+        ds_client.ds.config = previous_config
+
+
+@pytest.mark.asyncio
+async def test_database_color(ds_client):
+    expected_color = ds_client.ds.get_database("fixtures").color
+    # Should be something like #9403e5
+    expected_fragments = (
+        "10px solid #{}".format(expected_color),
+        "border-color: #{}".format(expected_color),
+    )
+    assert len(expected_color) == 6
+    for path in (
+        "/",
+        "/fixtures",
+        "/fixtures/facetable",
+        "/fixtures/paginated_view",
+        "/fixtures/pragma_cache_size",
+    ):
+        response = await ds_client.get(path)
+        result = any(fragment in response.text for fragment in expected_fragments)
+        if not result:
+            import pdb
+
+            pdb.set_trace()
+        assert any(fragment in response.text for fragment in expected_fragments)
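Several hunks above repeat one mechanical change: tests that used to swap out ds._metadata_local now deep-copy ds.config, overlay their overrides, and restore the original in finally, so permission settings cannot leak between tests. Extracted as a reusable helper, the pattern looks like this (a sketch using only the stdlib; temporary_config is our name, not a Datasette API):

import copy
from contextlib import contextmanager


@contextmanager
def temporary_config(ds, overrides):
    # Overlay config onto a live Datasette instance, always restoring it.
    previous = ds.config
    updated = copy.deepcopy(previous)
    updated.update(overrides)
    ds.config = updated
    try:
        yield ds
    finally:
        ds.config = previous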
@@ -1,55 +1,35 @@
 import pytest


-@pytest.mark.asyncio
-async def test_internal_only_available_to_root(ds_client):
-    cookie = ds_client.actor_cookie({"id": "root"})
-    assert (await ds_client.get("/_internal")).status_code == 403
-    assert (
-        await ds_client.get("/_internal", cookies={"ds_actor": cookie})
-    ).status_code == 200
+# ensure refresh_schemas() gets called before interacting with internal_db
+async def ensure_internal(ds_client):
+    await ds_client.get("/fixtures.json?sql=select+1")
+    return ds_client.ds.get_internal_database()


 @pytest.mark.asyncio
 async def test_internal_databases(ds_client):
-    cookie = ds_client.actor_cookie({"id": "root"})
-    databases = (
-        await ds_client.get(
-            "/_internal/databases.json?_shape=array", cookies={"ds_actor": cookie}
-        )
-    ).json()
-    assert len(databases) == 2
-    internal, fixtures = databases
-    assert internal["database_name"] == "_internal"
-    assert internal["is_memory"] == 1
-    assert internal["path"] is None
-    assert isinstance(internal["schema_version"], int)
-    assert fixtures["database_name"] == "fixtures"
+    internal_db = await ensure_internal(ds_client)
+    databases = await internal_db.execute("select * from catalog_databases")
+    assert len(databases) == 1
+    assert databases.rows[0]["database_name"] == "fixtures"


 @pytest.mark.asyncio
 async def test_internal_tables(ds_client):
-    cookie = ds_client.actor_cookie({"id": "root"})
-    tables = (
-        await ds_client.get(
-            "/_internal/tables.json?_shape=array", cookies={"ds_actor": cookie}
-        )
-    ).json()
+    internal_db = await ensure_internal(ds_client)
+    tables = await internal_db.execute("select * from catalog_tables")
     assert len(tables) > 5
-    table = tables[0]
+    table = tables.rows[0]
     assert set(table.keys()) == {"rootpage", "table_name", "database_name", "sql"}


 @pytest.mark.asyncio
 async def test_internal_indexes(ds_client):
-    cookie = ds_client.actor_cookie({"id": "root"})
-    indexes = (
-        await ds_client.get(
-            "/_internal/indexes.json?_shape=array", cookies={"ds_actor": cookie}
-        )
-    ).json()
+    internal_db = await ensure_internal(ds_client)
+    indexes = await internal_db.execute("select * from catalog_indexes")
     assert len(indexes) > 5
-    index = indexes[0]
+    index = indexes.rows[0]
     assert set(index.keys()) == {
         "partial",
         "name",

@@ -63,14 +43,10 @@ async def test_internal_indexes(ds_client):

 @pytest.mark.asyncio
 async def test_internal_foreign_keys(ds_client):
-    cookie = ds_client.actor_cookie({"id": "root"})
-    foreign_keys = (
-        await ds_client.get(
-            "/_internal/foreign_keys.json?_shape=array", cookies={"ds_actor": cookie}
-        )
-    ).json()
+    internal_db = await ensure_internal(ds_client)
+    foreign_keys = await internal_db.execute("select * from catalog_foreign_keys")
     assert len(foreign_keys) > 5
-    foreign_key = foreign_keys[0]
+    foreign_key = foreign_keys.rows[0]
     assert set(foreign_key.keys()) == {
         "table",
         "seq",
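The rewrite above replaces the old /_internal JSON pages with direct queries against the internal database, whose catalog_databases / catalog_tables / catalog_indexes / catalog_foreign_keys tables are populated by refresh_schemas() — hence the ensure_internal() helper that makes one ordinary request first. A sketch of the same flow outside pytest (the demo function and query are ours; the catalog table and column names come from the diff itself):

import asyncio
from datasette.app import Datasette


async def show_catalog():
    ds = Datasette(memory=True)
    await ds.invoke_startup()
    # Any ordinary request triggers refresh_schemas() first
    await ds.client.get("/_memory.json?sql=select+1")
    internal_db = ds.get_internal_database()
    tables = await internal_db.execute("select * from catalog_tables")
    for row in tables.rows:
        print(row["database_name"], row["table_name"])


asyncio.run(show_catalog())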
@@ -1,6 +1,8 @@
 """
 Tests for the datasette.database.Database class
 """
+
+from datasette.app import Datasette
 from datasette.database import Database, Results, MultipleValues
 from datasette.utils.sqlite import sqlite3
 from datasette.utils import Column

@@ -64,6 +66,33 @@ async def test_execute_fn(db):
     assert 2 == await db.execute_fn(get_1_plus_1)


+@pytest.mark.asyncio
+async def test_execute_fn_transaction_false():
+    datasette = Datasette(memory=True)
+    db = datasette.add_memory_database("test_execute_fn_transaction_false")
+
+    def run(conn):
+        try:
+            with conn:
+                conn.execute("create table foo (id integer primary key)")
+                conn.execute("insert into foo (id) values (44)")
+                # Table should exist
+                assert (
+                    conn.execute(
+                        'select count(*) from sqlite_master where name = "foo"'
+                    ).fetchone()[0]
+                    == 1
+                )
+                assert conn.execute("select id from foo").fetchall()[0][0] == 44
+                raise ValueError("Cancel commit")
+        except ValueError:
+            pass
+        # Row should NOT exist
+        assert conn.execute("select count(*) from foo").fetchone()[0] == 0
+
+    await db.execute_write_fn(run, transaction=False)
+
+
 @pytest.mark.parametrize(
     "tables,exists",
     (

@@ -472,9 +501,8 @@ async def test_execute_write_has_correctly_prepared_connection(db):
 @pytest.mark.asyncio
 async def test_execute_write_fn_block_false(db):
     def write_fn(conn):
-        with conn:
-            conn.execute("delete from roadside_attractions where pk = 1;")
-            row = conn.execute("select count(*) from roadside_attractions").fetchone()
+        conn.execute("delete from roadside_attractions where pk = 1;")
+        row = conn.execute("select count(*) from roadside_attractions").fetchone()
         return row[0]

     task_id = await db.execute_write_fn(write_fn, block=False)

@@ -484,9 +512,8 @@ async def test_execute_write_fn_block_false(db):
 @pytest.mark.asyncio
 async def test_execute_write_fn_block_true(db):
     def write_fn(conn):
-        with conn:
-            conn.execute("delete from roadside_attractions where pk = 1;")
-            row = conn.execute("select count(*) from roadside_attractions").fetchone()
+        conn.execute("delete from roadside_attractions where pk = 1;")
+        row = conn.execute("select count(*) from roadside_attractions").fetchone()
         return row[0]

     new_count = await db.execute_write_fn(write_fn)

@@ -519,6 +546,70 @@ async def test_execute_write_fn_connection_exception(tmpdir, app_client):
     app_client.ds.remove_database("immutable-db")


+def table_exists(conn, name):
+    return bool(
+        conn.execute(
+            """
+            with all_tables as (
+                select name from sqlite_master where type = 'table'
+                union all
+                select name from temp.sqlite_master where type = 'table'
+            )
+            select 1 from all_tables where name = ?
+            """,
+            (name,),
+        ).fetchall(),
+    )
+
+
+def table_exists_checker(name):
+    def inner(conn):
+        return table_exists(conn, name)
+
+    return inner
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("disable_threads", (False, True))
+async def test_execute_isolated(db, disable_threads):
+    if disable_threads:
+        ds = Datasette(memory=True, settings={"num_sql_threads": 0})
+        db = ds.add_database(Database(ds, memory_name="test_num_sql_threads_zero"))
+
+    # Create temporary table in write
+    await db.execute_write(
+        "create temporary table created_by_write (id integer primary key)"
+    )
+    # Should stay visible to write connection
+    assert await db.execute_write_fn(table_exists_checker("created_by_write"))
+
+    def create_shared_table(conn):
+        conn.execute("create table shared (id integer primary key)")
+        # And a temporary table that should not continue to exist
+        conn.execute(
+            "create temporary table created_by_isolated (id integer primary key)"
+        )
+        assert table_exists(conn, "created_by_isolated")
+        # Also confirm that created_by_write does not exist
+        return table_exists(conn, "created_by_write")
+
+    # shared should not exist
+    assert not await db.execute_fn(table_exists_checker("shared"))
+
+    # Create it using isolated
+    created_by_write_exists = await db.execute_isolated_fn(create_shared_table)
+    assert not created_by_write_exists
+
+    # shared SHOULD exist now
+    assert await db.execute_fn(table_exists_checker("shared"))
+
+    # created_by_isolated should not exist, even in write connection
+    assert not await db.execute_write_fn(table_exists_checker("created_by_isolated"))
+
+    # ... and a second call to isolated should not see that connection either
+    assert not await db.execute_isolated_fn(table_exists_checker("created_by_isolated"))
+
+
 @pytest.mark.asyncio
 async def test_mtime_ns(db):
     assert isinstance(db.mtime_ns, int)
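test_execute_isolated above depends on a SQLite property worth spelling out: temporary tables belong to a single connection, so a temp table created through the write connection persists across execute_write_fn() calls but is invisible to execute_isolated_fn(), which gets a fresh throwaway connection each time. A compact sketch of that contrast (iso_demo and scratch are invented names):

import asyncio
from datasette.app import Datasette


async def demo():
    ds = Datasette(memory=True)
    db = ds.add_memory_database("iso_demo")  # hypothetical name
    await db.execute_write("create temporary table scratch (id integer)")

    def sees_scratch(conn):
        return bool(
            conn.execute(
                "select 1 from temp.sqlite_master where name = 'scratch'"
            ).fetchall()
        )

    assert await db.execute_write_fn(sees_scratch)  # same write connection
    assert not await db.execute_isolated_fn(sees_scratch)  # fresh connection


asyncio.run(demo())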
Some files were not shown because too many files have changed in this diff.