Plugin configuration now lives in datasette.yaml/json

* Checkpoint, moving top-level plugin config to datasette.json
* Support database-level and table-level plugin configuration in datasette.yaml

Refs #2093
pull/2185/head
Alex Garcia 2023-09-13 14:06:25 -07:00 committed by GitHub
parent a4c96d01b2
commit b2ec8717c3
10 changed files with 217 additions and 54 deletions

@@ -368,7 +368,7 @@ class Datasette:
        for key in config_settings:
            if key not in DEFAULT_SETTINGS:
                raise StartupError("Invalid setting '{}' in datasette.json".format(key))
        self.config = config
        # CLI settings should overwrite datasette.json settings
        self._settings = dict(DEFAULT_SETTINGS, **(config_settings), **(settings or {}))
        self.renderers = {}  # File extension -> (renderer, can_render) functions
@@ -674,15 +674,43 @@ class Datasette:
    def plugin_config(self, plugin_name, database=None, table=None, fallback=True):
        """Return config for plugin, falling back from specified database/table"""
        plugins = self.metadata(
            "plugins", database=database, table=table, fallback=fallback
        )
        if plugins is None:
            return None
        plugin_config = plugins.get(plugin_name)
        # Resolve any $file and $env keys
        plugin_config = resolve_env_secrets(plugin_config, os.environ)
        return plugin_config
        if database is None and table is None:
            config = self._plugin_config_top(plugin_name)
        else:
            config = self._plugin_config_nested(plugin_name, database, table, fallback)
        return resolve_env_secrets(config, os.environ)

    def _plugin_config_top(self, plugin_name):
        """Returns any top-level plugin configuration for the specified plugin."""
        return ((self.config or {}).get("plugins") or {}).get(plugin_name)

    def _plugin_config_nested(self, plugin_name, database, table=None, fallback=True):
        """Returns any database or table-level plugin configuration for the specified plugin."""
        db_config = ((self.config or {}).get("databases") or {}).get(database)
        # if there's no db-level configuration, then return early, falling back to top-level if needed
        if not db_config:
            return self._plugin_config_top(plugin_name) if fallback else None
        db_plugin_config = (db_config.get("plugins") or {}).get(plugin_name)
        if table:
            table_plugin_config = (
                ((db_config.get("tables") or {}).get(table) or {}).get("plugins") or {}
            ).get(plugin_name)
            # fallback to db_config or top-level config, in that order, if needed
            if table_plugin_config is None and fallback:
                return db_plugin_config or self._plugin_config_top(plugin_name)
            return table_plugin_config
        # fallback to top-level if needed
        if db_plugin_config is None and fallback:
            db_plugin_config = self._plugin_config_top(plugin_name)
        return db_plugin_config

    def app_css_hash(self):
        if not hasattr(self, "_app_css_hash"):
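
For illustration (not part of the diff): a minimal sketch of how the new fallback order resolves, assuming an in-memory Datasette instance and hypothetical database and table names, configured in the nested style documented below.

    from datasette.app import Datasette

    ds = Datasette(
        memory=True,
        config={
            "plugins": {"datasette-my-plugin": {"key": "top"}},
            "databases": {
                "my_db": {
                    "plugins": {"datasette-my-plugin": {"key": "db"}},
                    "tables": {
                        "my_table": {"plugins": {"datasette-my-plugin": {"key": "table"}}}
                    },
                }
            },
        },
    )

    ds.plugin_config("datasette-my-plugin")  # {"key": "top"}
    ds.plugin_config("datasette-my-plugin", database="my_db")  # {"key": "db"}
    ds.plugin_config("datasette-my-plugin", database="my_db", table="my_table")  # {"key": "table"}
    ds.plugin_config("datasette-my-plugin", database="other_db")  # falls back to {"key": "top"}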

@@ -1,10 +1,101 @@
.. _configuration:

Configuration
========
=============

Datasette offers many way to configure your Datasette instances: server settings, plugin configuration, authentication, and more.
Datasette offers several ways to configure your Datasette instances: server settings, plugin configuration, authentication, and more.

To facilitate this, You can provide a `datasette.yaml` configuration file to datasette with the ``--config``/ ``-c`` flag:
To facilitate this, you can provide a ``datasette.yaml`` configuration file to datasette with the ``--config``/``-c`` flag:

.. code-block:: bash

    datasette mydatabase.db --config datasette.yaml

.. _configuration_reference:

``datasette.yaml`` reference
----------------------------

Here's a full example of all the valid configuration options that can exist inside ``datasette.yaml``.

.. tab:: YAML

    .. code-block:: yaml

        # Datasette settings block
        settings:
          default_page_size: 50
          sql_time_limit_ms: 3500
          max_returned_rows: 2000

        # top-level plugin configuration
        plugins:
          datasette-my-plugin:
            key: valueA

        # Database and table-level configuration
        databases:
          your_db_name:
            # plugin configuration for the your_db_name database
            plugins:
              datasette-my-plugin:
                key: valueA
            tables:
              your_table_name:
                # plugin configuration for the your_table_name table
                # inside your_db_name database
                plugins:
                  datasette-my-plugin:
                    key: valueB

.. _configuration_reference_settings:

Settings configuration
~~~~~~~~~~~~~~~~~~~~~~

:ref:`settings` can be configured in ``datasette.yaml`` with the ``settings`` key.

.. tab:: YAML

    .. code-block:: yaml

        # inside datasette.yaml
        settings:
          default_allow_sql: off
          default_page_size: 50

.. _configuration_reference_plugins:

Plugin configuration
~~~~~~~~~~~~~~~~~~~~

Configuration for plugins can be defined inside ``datasette.yaml``. For top-level plugin configuration, use the ``plugins`` key.

.. tab:: YAML

    .. code-block:: yaml

        # inside datasette.yaml
        plugins:
          datasette-my-plugin:
            key: my_value

For database-level or table-level plugin configuration, nest it under the appropriate database or table inside the ``databases`` key.

.. tab:: YAML

    .. code-block:: yaml

        # inside datasette.yaml
        databases:
          my_database:
            # plugin configuration for the my_database database
            plugins:
              datasette-my-plugin:
                key: my_value
          my_other_database:
            tables:
              my_table:
                # plugin configuration for the my_table table inside the my_other_database database
                plugins:
                  datasette-my-plugin:
                    key: my_value
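
For context, an illustrative sketch (not part of the documentation change above): a plugin would read the values defined above through ``datasette.plugin_config()``, which falls back from the table level to the database level to the top level.

    # Hypothetical plugin code, assuming the datasette.yaml example above
    config = datasette.plugin_config(
        "datasette-my-plugin", database="my_other_database", table="my_table"
    )
    # -> {"key": "my_value"} from the table-level block; if no table-level block
    #    existed, the database-level and then top-level "plugins" blocks would be used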

@@ -39,6 +39,7 @@ Contents
    getting_started
    installation
    configuration
    ecosystem
    cli-reference
    pages

@@ -296,7 +296,7 @@ The dictionary keys are the permission names - e.g. ``view-instance`` - and the

``table`` - None or string
    The table the user is interacting with.

This method lets you read plugin configuration values that were set in ``metadata.json``. See :ref:`writing_plugins_configuration` for full details of how this method should be used.
This method lets you read plugin configuration values that were set in ``datasette.yaml``. See :ref:`writing_plugins_configuration` for full details of how this method should be used.

The return value will be the value from the configuration file - usually a dictionary.

@@ -909,7 +909,7 @@ Potential use-cases:

* Run some initialization code for the plugin
* Create database tables that a plugin needs on startup
* Validate the metadata configuration for a plugin on startup, and raise an error if it is invalid
* Validate the configuration for a plugin on startup, and raise an error if it is invalid

.. note::
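
An illustrative sketch (not part of the diff) of the last use-case above: a hypothetical plugin validating its configuration, now read from ``datasette.yaml`` rather than ``metadata.json``, inside the ``startup(datasette)`` hook.

    from datasette import hookimpl


    @hookimpl
    def startup(datasette):
        # Hypothetical plugin name and setting; fail at startup if the
        # required configuration value is missing
        config = datasette.plugin_config("datasette-my-plugin") or {}
        if "key" not in config:
            raise ValueError("datasette-my-plugin requires the 'key' setting")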

@@ -184,7 +184,7 @@ This will return the ``{"latitude_column": "lat", "longitude_column": "lng"}`` i

If there is no configuration for that plugin, the method will return ``None``.

If it cannot find the requested configuration at the table layer, it will fall back to the database layer and then the root layer. For example, a user may have set the plugin configuration option like so:
If it cannot find the requested configuration at the table layer, it will fall back to the database layer and then the root layer. For example, a user may have set the plugin configuration option inside ``datasette.yaml`` like so:

.. [[[cog
    from metadata_doc import metadata_example
@@ -234,11 +234,10 @@ If it cannot find the requested configuration at the table layer, it will fall b

In this case, the above code would return that configuration for ANY table within the ``sf-trees`` database.

The plugin configuration could also be set at the top level of ``metadata.yaml``:
The plugin configuration could also be set at the top level of ``datasette.yaml``:

.. [[[cog
    metadata_example(cog, {
        "title": "This is the top-level title in metadata.json",
        "plugins": {
            "datasette-cluster-map": {
                "latitude_column": "xlat",

@@ -252,7 +251,6 @@ The plugin configuration could also be set at the top level of ``metadata.yaml``

    .. code-block:: yaml

        title: This is the top-level title in metadata.json
        plugins:
          datasette-cluster-map:
            latitude_column: xlat

@@ -264,7 +262,6 @@ The plugin configuration could also be set at the top level of ``metadata.yaml``

    .. code-block:: json

        {
            "title": "This is the top-level title in metadata.json",
            "plugins": {
                "datasette-cluster-map": {
                    "latitude_column": "xlat",

@@ -41,7 +41,7 @@ def wait_until_responds(url, timeout=5.0, client=httpx, **kwargs):

@pytest_asyncio.fixture
async def ds_client():
    from datasette.app import Datasette
    from .fixtures import METADATA, PLUGINS_DIR
    from .fixtures import CONFIG, METADATA, PLUGINS_DIR

    global _ds_client
    if _ds_client is not None:
@@ -49,6 +49,7 @@ async def ds_client():
    ds = Datasette(
        metadata=METADATA,
        config=CONFIG,
        plugins_dir=PLUGINS_DIR,
        settings={
            "default_page_size": 50,

@@ -114,6 +114,7 @@ def make_app_client(
    inspect_data=None,
    static_mounts=None,
    template_dir=None,
    config=None,
    metadata=None,
    crossdb=False,
):
@@ -158,6 +159,7 @@ def make_app_client(
        memory=memory,
        cors=cors,
        metadata=metadata or METADATA,
        config=config or CONFIG,
        plugins_dir=PLUGINS_DIR,
        settings=settings,
        inspect_data=inspect_data,
@@ -296,6 +298,33 @@ def generate_sortable_rows(num):
        }

CONFIG = {
    "plugins": {
        "name-of-plugin": {"depth": "root"},
        "env-plugin": {"foo": {"$env": "FOO_ENV"}},
        "env-plugin-list": [{"in_a_list": {"$env": "FOO_ENV"}}],
        "file-plugin": {"foo": {"$file": TEMP_PLUGIN_SECRET_FILE}},
    },
    "databases": {
        "fixtures": {
            "plugins": {"name-of-plugin": {"depth": "database"}},
            "tables": {
                "simple_primary_key": {
                    "plugins": {
                        "name-of-plugin": {
                            "depth": "table",
                            "special": "this-is-simple_primary_key",
                        }
                    },
                },
                "sortable": {
                    "plugins": {"name-of-plugin": {"depth": "table"}},
                },
            },
        }
    },
}
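
# Illustrative note (not part of the diff): given this CONFIG, the fallback logic in
# datasette/app.py above is expected to resolve plugin_config() roughly as follows:
#   ds.plugin_config("name-of-plugin") == {"depth": "root"}
#   ds.plugin_config("name-of-plugin", database="fixtures") == {"depth": "database"}
#   ds.plugin_config("name-of-plugin", database="fixtures", table="simple_primary_key")
#       == {"depth": "table", "special": "this-is-simple_primary_key"}
#   ds.plugin_config("name-of-plugin", database="fixtures", table="no_such_table")
#       == {"depth": "database"}  # falls back to the database-level block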

METADATA = {
    "title": "Datasette Fixtures",
    "description_html": 'An example SQLite database demonstrating Datasette. <a href="/login-as-root">Sign in as root user</a>',
@@ -306,26 +335,13 @@ METADATA = {
    "about": "About Datasette",
    "about_url": "https://github.com/simonw/datasette",
    "extra_css_urls": ["/static/extra-css-urls.css"],
    "plugins": {
        "name-of-plugin": {"depth": "root"},
        "env-plugin": {"foo": {"$env": "FOO_ENV"}},
        "env-plugin-list": [{"in_a_list": {"$env": "FOO_ENV"}}],
        "file-plugin": {"foo": {"$file": TEMP_PLUGIN_SECRET_FILE}},
    },
    "databases": {
        "fixtures": {
            "description": "Test tables description",
            "plugins": {"name-of-plugin": {"depth": "database"}},
            "tables": {
                "simple_primary_key": {
                    "description_html": "Simple <em>primary</em> key",
                    "title": "This <em>HTML</em> is escaped",
                    "plugins": {
                        "name-of-plugin": {
                            "depth": "table",
                            "special": "this-is-simple_primary_key",
                        }
                    },
                },
                "sortable": {
                    "sortable_columns": [
@@ -334,7 +350,6 @@ METADATA = {
                        "sortable_with_nulls_2",
                        "text",
                    ],
                    "plugins": {"name-of-plugin": {"depth": "table"}},
                },
                "no_primary_key": {"sortable_columns": [], "hidden": True},
                "units": {"units": {"distance": "m", "frequency": "Hz"}},
@@ -768,6 +783,7 @@ def assert_permissions_checked(datasette, actions):
    type=click.Path(file_okay=True, dir_okay=False),
)
@click.argument("metadata", required=False)
@click.argument("config", required=False)
@click.argument(
    "plugins_path", type=click.Path(file_okay=False, dir_okay=True), required=False
)
@@ -782,7 +798,7 @@ def assert_permissions_checked(datasette, actions):
    type=click.Path(file_okay=True, dir_okay=False),
    help="Write out second test DB to this file",
)
def cli(db_filename, metadata, plugins_path, recreate, extra_db_filename):
def cli(db_filename, config, metadata, plugins_path, recreate, extra_db_filename):
    """Write out the fixtures database used by Datasette's test suite"""
    if metadata and not metadata.endswith(".json"):
        raise click.ClickException("Metadata should end with .json")
@@ -805,6 +821,10 @@ def cli(db_filename, metadata, plugins_path, recreate, extra_db_filename):
        with open(metadata, "w") as fp:
            fp.write(json.dumps(METADATA, indent=4))
        print(f"- metadata written to {metadata}")
    if config:
        with open(config, "w") as fp:
            fp.write(json.dumps(CONFIG, indent=4))
        print(f"- config written to {config}")
    if plugins_path:
        path = pathlib.Path(plugins_path)
        if not path.exists():
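
# Illustrative note (not part of the diff): assuming Click keeps the declared argument
# order (db_filename, metadata, config, plugins_path), the fixtures CLI can now write
# CONFIG out alongside METADATA, e.g.:
#   python tests/fixtures.py fixtures.db fixtures-metadata.json fixtures-config.json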

@@ -238,6 +238,44 @@ def test_setting(args):
    assert settings["default_page_size"] == 5


def test_plugin_s_overwrite():
    runner = CliRunner()
    plugins_dir = str(pathlib.Path(__file__).parent / "plugins")

    result = runner.invoke(
        cli,
        [
            "--plugins-dir",
            plugins_dir,
            "--get",
            "/_memory.json?sql=select+prepare_connection_args()",
        ],
    )
    assert result.exit_code == 0, result.output
    assert (
        json.loads(result.output).get("rows")[0].get("prepare_connection_args()")
        == 'database=_memory, datasette.plugin_config("name-of-plugin")=None'
    )

    result = runner.invoke(
        cli,
        [
            "--plugins-dir",
            plugins_dir,
            "--get",
            "/_memory.json?sql=select+prepare_connection_args()",
            "-s",
            "plugins.name-of-plugin",
            "OVERRIDE",
        ],
    )
    assert result.exit_code == 0, result.output
    assert (
        json.loads(result.output).get("rows")[0].get("prepare_connection_args()")
        == 'database=_memory, datasette.plugin_config("name-of-plugin")=OVERRIDE'
    )
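
# Illustrative note (not part of the diff): the second invocation above corresponds
# roughly to this command line, assuming a plugins/ directory containing the
# prepare_connection_args test plugin:
#   datasette --plugins-dir plugins \
#       -s plugins.name-of-plugin OVERRIDE \
#       --get "/_memory.json?sql=select+prepare_connection_args()"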


def test_setting_type_validation():
    runner = CliRunner(mix_stderr=False)
    result = runner.invoke(cli, ["--setting", "default_page_size", "dog"])

@@ -234,9 +234,6 @@ async def test_plugin_config(ds_client):
async def test_plugin_config_env(ds_client):
    os.environ["FOO_ENV"] = "FROM_ENVIRONMENT"
    assert {"foo": "FROM_ENVIRONMENT"} == ds_client.ds.plugin_config("env-plugin")
    # Ensure secrets aren't visible in /-/metadata.json
    metadata = await ds_client.get("/-/metadata.json")
    assert {"foo": {"$env": "FOO_ENV"}} == metadata.json()["plugins"]["env-plugin"]
    del os.environ["FOO_ENV"]

@@ -246,11 +243,6 @@ async def test_plugin_config_env_from_list(ds_client):
    assert [{"in_a_list": "FROM_ENVIRONMENT"}] == ds_client.ds.plugin_config(
        "env-plugin-list"
    )
    # Ensure secrets aren't visible in /-/metadata.json
    metadata = await ds_client.get("/-/metadata.json")
    assert [{"in_a_list": {"$env": "FOO_ENV"}}] == metadata.json()["plugins"][
        "env-plugin-list"
    ]
    del os.environ["FOO_ENV"]

@@ -259,11 +251,6 @@ async def test_plugin_config_file(ds_client):
    with open(TEMP_PLUGIN_SECRET_FILE, "w") as fp:
        fp.write("FROM_FILE")
    assert {"foo": "FROM_FILE"} == ds_client.ds.plugin_config("file-plugin")
    # Ensure secrets aren't visible in /-/metadata.json
    metadata = await ds_client.get("/-/metadata.json")
    assert {"foo": {"$file": TEMP_PLUGIN_SECRET_FILE}} == metadata.json()["plugins"][
        "file-plugin"
    ]
    os.remove(TEMP_PLUGIN_SECRET_FILE)

@@ -722,7 +709,7 @@ async def test_hook_register_routes(ds_client, path, body):
@pytest.mark.parametrize("configured_path", ("path1", "path2"))
def test_hook_register_routes_with_datasette(configured_path):
    with make_app_client(
        metadata={
        config={
            "plugins": {
                "register-route-demo": {
                    "path": configured_path,
@@ -741,7 +728,7 @@ def test_hook_register_routes_with_datasette(configured_path):
def test_hook_register_routes_override():
    "Plugins can over-ride default paths such as /db/table"
    with make_app_client(
        metadata={
        config={
            "plugins": {
                "register-route-demo": {
                    "path": "blah",
@@ -1099,7 +1086,7 @@ async def test_hook_filters_from_request(ds_client):
@pytest.mark.parametrize("extra_metadata", (False, True))
async def test_hook_register_permissions(extra_metadata):
    ds = Datasette(
        metadata={
        config={
            "plugins": {
                "datasette-register-permissions": {
                    "permissions": [
@@ -1151,7 +1138,7 @@ async def test_hook_register_permissions_no_duplicates(duplicate):
    if duplicate == "abbr":
        abbr2 = "abbr1"
    ds = Datasette(
        metadata={
        config={
            "plugins": {
                "datasette-register-permissions": {
                    "permissions": [
@@ -1186,7 +1173,7 @@ async def test_hook_register_permissions_no_duplicates(duplicate):
@pytest.mark.asyncio
async def test_hook_register_permissions_allows_identical_duplicates():
    ds = Datasette(
        metadata={
        config={
            "plugins": {
                "datasette-register-permissions": {
                    "permissions": [