Updated docs renaming config to settings

- config.html is now settings.html
- ConfigOption in app.py is now Setting
- updated documentation unit tests

Refs #1106
pull/1112/head
Simon Willison 2020-11-24 13:22:33 -08:00
parent 33eadb8782
commit 5a77f7a649
17 changed files with 131 additions and 119 deletions

View file

@@ -53,11 +53,11 @@ jobs:
--plugins-dir=plugins \
--branch=$GITHUB_SHA \
--version-note=$GITHUB_SHA \
--extra-options="--config template_debug:1" \
--extra-options="--setting template_debug 1" \
--service=datasette-latest
# Deploy docs.db to a different service
datasette publish cloudrun docs.db \
--branch=$GITHUB_SHA \
--version-note=$GITHUB_SHA \
--extra-options="--config template_debug:1" \
--extra-options="--setting template_debug 1" \
--service=datasette-docs-latest

View file

@@ -82,91 +82,85 @@ app_root = Path(__file__).parent.parent
MEMORY = object()
ConfigOption = collections.namedtuple("ConfigOption", ("name", "default", "help"))
CONFIG_OPTIONS = (
ConfigOption("default_page_size", 100, "Default page size for the table view"),
ConfigOption(
Setting = collections.namedtuple("Setting", ("name", "default", "help"))
SETTINGS = (
Setting("default_page_size", 100, "Default page size for the table view"),
Setting(
"max_returned_rows",
1000,
"Maximum rows that can be returned from a table or custom query",
),
ConfigOption(
Setting(
"num_sql_threads",
3,
"Number of threads in the thread pool for executing SQLite queries",
),
ConfigOption(
"sql_time_limit_ms", 1000, "Time limit for a SQL query in milliseconds"
),
ConfigOption(
Setting("sql_time_limit_ms", 1000, "Time limit for a SQL query in milliseconds"),
Setting(
"default_facet_size", 30, "Number of values to return for requested facets"
),
ConfigOption(
"facet_time_limit_ms", 200, "Time limit for calculating a requested facet"
),
ConfigOption(
Setting("facet_time_limit_ms", 200, "Time limit for calculating a requested facet"),
Setting(
"facet_suggest_time_limit_ms",
50,
"Time limit for calculating a suggested facet",
),
ConfigOption(
Setting(
"hash_urls",
False,
"Include DB file contents hash in URLs, for far-future caching",
),
ConfigOption(
Setting(
"allow_facet",
True,
"Allow users to specify columns to facet using ?_facet= parameter",
),
ConfigOption(
Setting(
"allow_download",
True,
"Allow users to download the original SQLite database files",
),
ConfigOption("suggest_facets", True, "Calculate and display suggested facets"),
ConfigOption(
Setting("suggest_facets", True, "Calculate and display suggested facets"),
Setting(
"default_cache_ttl",
5,
"Default HTTP cache TTL (used in Cache-Control: max-age= header)",
),
ConfigOption(
Setting(
"default_cache_ttl_hashed",
365 * 24 * 60 * 60,
"Default HTTP cache TTL for hashed URL pages",
),
ConfigOption(
"cache_size_kb", 0, "SQLite cache size in KB (0 == use SQLite default)"
),
ConfigOption(
Setting("cache_size_kb", 0, "SQLite cache size in KB (0 == use SQLite default)"),
Setting(
"allow_csv_stream",
True,
"Allow .csv?_stream=1 to download all rows (ignoring max_returned_rows)",
),
ConfigOption(
Setting(
"max_csv_mb",
100,
"Maximum size allowed for CSV export in MB - set 0 to disable this limit",
),
ConfigOption(
Setting(
"truncate_cells_html",
2048,
"Truncate cells longer than this in HTML table view - set 0 to disable",
),
ConfigOption(
Setting(
"force_https_urls",
False,
"Force URLs in API output to always use https:// protocol",
),
ConfigOption(
Setting(
"template_debug",
False,
"Allow display of template debug information with ?_context=1",
),
ConfigOption("base_url", "/", "Datasette URLs should use this base path"),
Setting("base_url", "/", "Datasette URLs should use this base path"),
)
DEFAULT_CONFIG = {option.name: option.default for option in CONFIG_OPTIONS}
DEFAULT_SETTINGS = {option.name: option.default for option in SETTINGS}
async def favicon(request, send):
@@ -270,7 +264,7 @@ class Datasette:
raise StartupError("config.json should be renamed to settings.json")
if config_dir and (config_dir / "settings.json").exists() and not config:
config = json.load((config_dir / "settings.json").open())
self._config = dict(DEFAULT_CONFIG, **(config or {}))
self._config = dict(DEFAULT_SETTINGS, **(config or {}))
self.renderers = {} # File extension -> (renderer, can_render) functions
self.version_note = version_note
self.executor = futures.ThreadPoolExecutor(
@@ -358,7 +352,7 @@ class Datasette:
def config_dict(self):
# Returns a fully resolved config dictionary, useful for templates
return {option.name: self.config(option.name) for option in CONFIG_OPTIONS}
return {option.name: self.config(option.name) for option in SETTINGS}
def metadata(self, key=None, database=None, table=None, fallback=True):
"""

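As a quick orientation to the rename, here is a minimal sketch (assuming this branch of Datasette is installed; the override value is purely illustrative) of the new module-level names and of how the constructor resolves overrides, mirroring the ``dict(DEFAULT_SETTINGS, **(config or {}))`` line above:

    from datasette.app import SETTINGS, DEFAULT_SETTINGS, Setting

    # SETTINGS is a tuple of Setting namedtuples: (name, default, help)
    assert isinstance(SETTINGS[0], Setting)
    print(DEFAULT_SETTINGS["default_page_size"])  # 100

    # Per-instance overrides are layered on top of the defaults
    overrides = {"default_page_size": 50}
    resolved = dict(DEFAULT_SETTINGS, **overrides)
    print(resolved["default_page_size"])  # 50
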
View file

@@ -12,7 +12,7 @@ from subprocess import call
import sys
from runpy import run_module
import webbrowser
from .app import Datasette, DEFAULT_CONFIG, CONFIG_OPTIONS, pm
from .app import Datasette, DEFAULT_SETTINGS, SETTINGS, pm
from .utils import (
StartupError,
check_connection,
@@ -39,7 +39,7 @@ class Config(click.ParamType):
self.fail(f'"{config}" should be name:value', param, ctx)
return
name, value = config.split(":", 1)
if name not in DEFAULT_CONFIG:
if name not in DEFAULT_SETTINGS:
self.fail(
f"{name} is not a valid option (--help-config to see all)",
param,
@@ -47,7 +47,7 @@
)
return
# Type checking
default = DEFAULT_CONFIG[name]
default = DEFAULT_SETTINGS[name]
if isinstance(default, bool):
try:
return name, value_as_boolean(value)
@@ -72,7 +72,7 @@ class Setting(CompositeParamType):
def convert(self, config, param, ctx):
name, value = config
if name not in DEFAULT_CONFIG:
if name not in DEFAULT_SETTINGS:
self.fail(
f"{name} is not a valid option (--help-config to see all)",
param,
@@ -80,7 +80,7 @@ class Setting(CompositeParamType):
)
return
# Type checking
default = DEFAULT_CONFIG[name]
default = DEFAULT_SETTINGS[name]
if isinstance(default, bool):
try:
return name, value_as_boolean(value)
@@ -432,7 +432,7 @@ def serve(
formatter.write_dl(
[
(option.name, f"{option.help} (default={option.default})")
for option in CONFIG_OPTIONS
for option in SETTINGS
]
)
click.echo(formatter.getvalue())
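
The new ``Setting`` composite parameter accepts a name and a value and coerces the value to the type of that setting's default. A minimal, self-contained sketch of that coercion (this is not the actual click type; it assumes this branch is installed so ``DEFAULT_SETTINGS`` and ``value_as_boolean`` can be imported):

    from datasette.app import DEFAULT_SETTINGS
    from datasette.utils import value_as_boolean

    def coerce_setting(name, value):
        # Reject unknown names, then coerce based on the type of the default
        if name not in DEFAULT_SETTINGS:
            raise ValueError(f"{name} is not a valid setting")
        default = DEFAULT_SETTINGS[name]
        if isinstance(default, bool):
            return name, value_as_boolean(value)  # accepts on/off, true/false, 1/0
        if isinstance(default, int):
            return name, int(value)
        return name, value

    print(coerce_setting("allow_facet", "off"))         # ('allow_facet', False)
    print(coerce_setting("sql_time_limit_ms", "3500"))  # ('sql_time_limit_ms', 3500)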

View file

@@ -49,7 +49,7 @@ The new :ref:`internals_datasette_urls` family of methods can be used to generat
Running Datasette behind a proxy
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The :ref:`config_base_url` configuration option is designed to help run Datasette on a specific path behind a proxy - for example if you want to run an instance of Datasette at ``/my-datasette/`` within your existing site's URL hierarchy, proxied behind nginx or Apache.
The :ref:`setting_base_url` configuration option is designed to help run Datasette on a specific path behind a proxy - for example if you want to run an instance of Datasette at ``/my-datasette/`` within your existing site's URL hierarchy, proxied behind nginx or Apache.
Support for this configuration option has been greatly improved (`#1023 <https://github.com/simonw/datasette/issues/1023>`__), and guidelines for using it are now available in a new documentation section on :ref:`deploying_proxy`. (`#1027 <https://github.com/simonw/datasette/issues/1027>`__)
@@ -353,9 +353,9 @@ Signed values and secrets
Both flash messages and user authentication needed a way to sign values and set signed cookies. Two new methods are now available for plugins to take advantage of this mechanism: :ref:`datasette_sign` and :ref:`datasette_unsign`.
Datasette will generate a secret automatically when it starts up, but to avoid resetting the secret (and hence invalidating any cookies) every time the server restarts you should set your own secret. You can pass a secret to Datasette using the new ``--secret`` option or with a ``DATASETTE_SECRET`` environment variable. See :ref:`config_secret` for more details.
Datasette will generate a secret automatically when it starts up, but to avoid resetting the secret (and hence invalidating any cookies) every time the server restarts you should set your own secret. You can pass a secret to Datasette using the new ``--secret`` option or with a ``DATASETTE_SECRET`` environment variable. See :ref:`settings_secret` for more details.
You can also set a secret when you deploy Datasette using ``datasette publish`` or ``datasette package`` - see :ref:`config_publish_secrets`.
You can also set a secret when you deploy Datasette using ``datasette publish`` or ``datasette package`` - see :ref:`settings_publish_secrets`.
Plugins can now sign values and verify their signatures using the :ref:`datasette.sign() <datasette_sign>` and :ref:`datasette.unsign() <datasette_unsign>` methods.
@@ -450,7 +450,7 @@ A small release which provides improved internal methods for use in plugins, alo
You can now create :ref:`custom pages <custom_pages>` within your Datasette instance using a custom template file. For example, adding a template file called ``templates/pages/about.html`` will result in a new page being served at ``/about`` on your instance. See the :ref:`custom pages documentation <custom_pages>` for full details, including how to return custom HTTP headers, redirects and status codes. (`#648 <https://github.com/simonw/datasette/issues/648>`__)
:ref:`config_dir` (`#731 <https://github.com/simonw/datasette/issues/731>`__) allows you to define a custom Datasette instance as a directory. So instead of running the following::
:ref:`settings_dir` (`#731 <https://github.com/simonw/datasette/issues/731>`__) allows you to define a custom Datasette instance as a directory. So instead of running the following::
$ datasette one.db two.db \
--metadata.json \
@@ -480,7 +480,7 @@ Also in this release:
* Datasette :ref:`metadata` can now be provided as a YAML file as an optional alternative to JSON. See :ref:`metadata_yaml`. (`#713 <https://github.com/simonw/datasette/issues/713>`__)
* Removed support for ``datasette publish now``, which used the now-retired Zeit Now v1 hosting platform. A new plugin, `datasette-publish-now <https://github.com/simonw/datasette-publish-now>`__, can be installed to publish data to Zeit (`now Vercel <https://vercel.com/blog/zeit-is-now-vercel>`__) Now v2. (`#710 <https://github.com/simonw/datasette/issues/710>`__)
* Fixed a bug where the ``extra_template_vars(request, view_name)`` plugin hook was not receiving the correct ``view_name``. (`#716 <https://github.com/simonw/datasette/issues/716>`__)
* Variables added to the template context by the ``extra_template_vars()`` plugin hook are now shown in the ``?_context=1`` debugging mode (see :ref:`config_template_debug`). (`#693 <https://github.com/simonw/datasette/issues/693>`__)
* Variables added to the template context by the ``extra_template_vars()`` plugin hook are now shown in the ``?_context=1`` debugging mode (see :ref:`settings_template_debug`). (`#693 <https://github.com/simonw/datasette/issues/693>`__)
* Fixed a bug where the "templates considered" HTML comment was no longer being displayed. (`#689 <https://github.com/simonw/datasette/issues/689>`__)
* Fixed a ``datasette publish`` bug where ``--plugin-secret`` would over-ride plugin configuration in the provided ``metadata.json`` file. (`#724 <https://github.com/simonw/datasette/issues/724>`__)
* Added a new CSS class for customizing the canned query page. (`#727 <https://github.com/simonw/datasette/issues/727>`__)
@@ -490,7 +490,7 @@ Also in this release:
0.39 (2020-03-24)
-----------------
* New :ref:`config_base_url` configuration setting for serving up the correct links while running Datasette under a different URL prefix. (`#394 <https://github.com/simonw/datasette/issues/394>`__)
* New :ref:`setting_base_url` configuration setting for serving up the correct links while running Datasette under a different URL prefix. (`#394 <https://github.com/simonw/datasette/issues/394>`__)
* New metadata settings ``"sort"`` and ``"sort_desc"`` for setting the default sort order for a table. See :ref:`metadata_default_sort`. (`#702 <https://github.com/simonw/datasette/issues/702>`__)
* Sort direction arrow now displays by default on the primary key. This means you only have to click once (not twice) to sort in reverse order. (`#677 <https://github.com/simonw/datasette/issues/677>`__)
* New ``await Request(scope, receive).post_vars()`` method for accessing POST form variables. (`#700 <https://github.com/simonw/datasette/issues/700>`__)
@@ -565,7 +565,7 @@ Also in this release:
* asyncio task information is now included on the ``/-/threads`` debug page
* Bumped Uvicorn dependency 0.11
* You can now use ``--port 0`` to listen on an available port
* New :ref:`config_template_debug` setting for debugging templates, e.g. https://latest.datasette.io/fixtures/roadside_attractions?_context=1 (`#654 <https://github.com/simonw/datasette/issues/654>`__)
* New :ref:`settings_template_debug` setting for debugging templates, e.g. https://latest.datasette.io/fixtures/roadside_attractions?_context=1 (`#654 <https://github.com/simonw/datasette/issues/654>`__)
.. _v0_32:
@@ -1000,7 +1000,7 @@ Check out the :ref:`CSV export documentation <csv_export>` for more details, or
try the feature out on
https://fivethirtyeight.datasettes.com/fivethirtyeight/bechdel%2Fmovies
If your table has more than :ref:`config_max_returned_rows` (default 1,000)
If your table has more than :ref:`settings_max_returned_rows` (default 1,000)
Datasette provides the option to *stream all rows*. This option takes advantage
of async Python and Datasette's efficient :ref:`pagination <pagination>` to
iterate through the entire matching result set and stream it back as a
@@ -1020,7 +1020,7 @@ table, using the new ``_labels=on`` querystring option. See
New configuration settings
~~~~~~~~~~~~~~~~~~~~~~~~~~
Datasette's :ref:`config` now also supports boolean settings. A number of new
Datasette's :ref:`settings` now also supports boolean settings. A number of new
configuration options have been added:
* ``num_sql_threads`` - the number of threads used to execute SQLite queries. Defaults to 3.

View file

@@ -23,7 +23,7 @@ file, which looks like this and has the following options:
the ``city_id`` column is accompanied by a ``city_id_label`` column.
* **stream all rows** - by default CSV files only contain the first
:ref:`config_max_returned_rows` records. This option will cause Datasette to
:ref:`settings_max_returned_rows` records. This option will cause Datasette to
loop through every matching record and return them as a single CSV file.
You can try that out on https://latest.datasette.io/fixtures/facetable?_size=4
@@ -40,9 +40,9 @@ Since databases can get pretty large, by default this option is capped at 100MB
if a table returns more than 100MB of data the last line of the CSV will be a
truncation error message.
You can increase or remove this limit using the :ref:`config_max_csv_mb` config
You can increase or remove this limit using the :ref:`settings_max_csv_mb` config
setting. You can also disable the CSV export feature entirely using
:ref:`config_allow_csv_stream`.
:ref:`settings_allow_csv_stream`.
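
As an end-to-end illustration of the streaming option described above, here is a hedged sketch that streams every row of the ``fixtures/facetable`` demo table as CSV (``httpx`` is an assumption here; any HTTP client would do):

    import httpx

    url = "https://latest.datasette.io/fixtures/facetable.csv"
    # ?_stream=1 asks Datasette to loop through every matching row,
    # ignoring max_returned_rows (subject to the max_csv_mb cap)
    with httpx.stream("GET", url, params={"_stream": "1"}) as response:
        for line in response.iter_lines():
            print(line)
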
A note on URLs
--------------

View file

@@ -58,7 +58,7 @@ Add a random value for the ``DATASETTE_SECRET`` - this will be used to sign Data
$ python3 -c 'import secrets; print(secrets.token_hex(32))'
This configuration will run Datasette against all database files contained in the ``/home/ubunt/datasette-root`` directory. If that directory contains a ``metadata.yml`` (or ``.json``) file or a ``templates/`` or ``plugins/`` sub-directory those will automatically be loaded by Datasette - see :ref:`config_dir` for details.
This configuration will run Datasette against all database files contained in the ``/home/ubunt/datasette-root`` directory. If that directory contains a ``metadata.yml`` (or ``.json``) file or a ``templates/`` or ``plugins/`` sub-directory those will automatically be loaded by Datasette - see :ref:`settings_dir` for details.
You can start the Datasette process running using the following::
@@ -101,7 +101,7 @@ The ``Procfile`` lets the hosting platform know how to run the command that serv
web: datasette . -h 0.0.0.0 -p $PORT --cors
The ``$PORT`` environment variable is provided by the hosting platform. ``--cors`` enables CORS requests from JavaScript running on other websites to your domain - omit this if you don't want to allow CORS. You can add additional Datasette :ref:`config` options here too.
The ``$PORT`` environment variable is provided by the hosting platform. ``--cors`` enables CORS requests from JavaScript running on other websites to your domain - omit this if you don't want to allow CORS. You can add additional Datasette :ref:`settings` options here too.
These two files should be enough to deploy Datasette on any host that supports buildpacks. Datasette will serve any SQLite files that are included in the root directory of the application.
@@ -118,9 +118,9 @@ Running Datasette behind a proxy
You may wish to run Datasette behind an Apache or nginx proxy, using a path within your existing site.
You can use the :ref:`config_base_url` configuration setting to tell Datasette to serve traffic with a specific URL prefix. For example, you could run Datasette like this::
You can use the :ref:`setting_base_url` configuration setting to tell Datasette to serve traffic with a specific URL prefix. For example, you could run Datasette like this::
datasette my-database.db --config base_url:/my-datasette/ -p 8009
datasette my-database.db --setting base_url /my-datasette/ -p 8009
This will run Datasette with the following URLs:

View file

@@ -51,7 +51,7 @@ Contents
full_text_search
spatialite
metadata
config
settings
introspection
custom_templates
plugins

View file

@@ -350,7 +350,7 @@ Returns the absolute URL for the given path, including the protocol and host. Fo
absolute_url = datasette.absolute_url(request, "/dbname/table.json")
# Would return "http://localhost:8001/dbname/table.json"
The current request object is used to determine the hostname and protocol that should be used for the returned URL. The :ref:`config_force_https_urls` configuration setting is taken into account.
The current request object is used to determine the hostname and protocol that should be used for the returned URL. The :ref:`settings_force_https_urls` configuration setting is taken into account.
.. _internals_datasette_client:
@@ -397,7 +397,7 @@ These methods can be used with :ref:`internals_datasette_urls` - for example:
)
).json()
``datasette.client`` methods automatically take the current :ref:`config_base_url` setting into account, whether or not you use the ``datasette.urls`` family of methods to construct the path.
``datasette.client`` methods automatically take the current :ref:`setting_base_url` setting into account, whether or not you use the ``datasette.urls`` family of methods to construct the path.
For documentation on available ``**kwargs`` options and the shape of the HTTPX Response object refer to the `HTTPX Async documentation <https://www.python-httpx.org/async/>`__.
@@ -406,7 +406,7 @@ For documentation on available ``**kwargs`` options and the shape of the HTTPX R
datasette.urls
--------------
The ``datasette.urls`` object contains methods for building URLs to pages within Datasette. Plugins should use this to link to pages, since these methods take into account any :ref:`config_base_url` configuration setting that might be in effect.
The ``datasette.urls`` object contains methods for building URLs to pages within Datasette. Plugins should use this to link to pages, since these methods take into account any :ref:`setting_base_url` configuration setting that might be in effect.
``datasette.urls.instance(format=None)``
Returns the URL to the Datasette instance root page. This is usually ``"/"``.
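
To show how ``datasette.urls`` and ``datasette.client`` interact with ``base_url``, here is a rough sketch (the ``base_url`` value, the in-memory instance and the database/table names are assumptions for illustration; exact behaviour may vary slightly between versions):

    import asyncio
    from datasette.app import Datasette

    async def demo():
        ds = Datasette([], memory=True, config={"base_url": "/tools/datasette/"})
        # datasette.urls methods prepend the configured base_url
        print(ds.urls.instance())                      # /tools/datasette/
        print(ds.urls.table("mydatabase", "mytable"))
        # datasette.client resolves paths against the same prefix automatically
        response = await ds.client.get("/-/settings.json")
        print(response.json()["base_url"])             # /tools/datasette/

    asyncio.run(demo())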

View file

@@ -92,7 +92,7 @@ Add ``?all=1`` to include details of the default plugins baked into Datasette.
/-/settings
-----------
Shows the :ref:`config` options for this instance of Datasette. `Settings example <https://fivethirtyeight.datasettes.com/-/settings>`_:
Shows the :ref:`settings` for this instance of Datasette. `Settings example <https://fivethirtyeight.datasettes.com/-/settings>`_:
.. code-block:: json

View file

@@ -66,7 +66,7 @@ Row
Every row in every Datasette table has its own URL. This means individual records can be linked to directly.
Table cells with extremely long text contents are truncated on the table view according to the :ref:`config_truncate_cells_html` setting. If a cell has been truncated the full length version of that cell will be available on the row page.
Table cells with extremely long text contents are truncated on the table view according to the :ref:`settings_truncate_cells_html` setting. If a cell has been truncated the full length version of that cell will be available on the row page.
Rows which are the targets of foreign key references from other tables will show a link to a filtered search for all records that reference that row. Here's an example from the Registers of Members Interests database:

View file

@@ -56,7 +56,7 @@ Using a caching proxy in this way could enable a Datasette-backed visualization
Datasette's integration with HTTP caches can be enabled using a combination of configuration options and querystring arguments.
The :ref:`config_default_cache_ttl` setting sets the default HTTP cache TTL for all Datasette pages. This is 5 seconds unless you change it - you can set it to 0 if you wish to disable HTTP caching entirely.
The :ref:`settings_default_cache_ttl` setting sets the default HTTP cache TTL for all Datasette pages. This is 5 seconds unless you change it - you can set it to 0 if you wish to disable HTTP caching entirely.
You can also change the cache timeout on a per-request basis using the ``?_ttl=10`` querystring parameter. This can be useful when you are working with the Datasette JSON API - you may decide that a specific query can be cached for a longer time, or maybe you need to set ``?_ttl=0`` for some requests for example if you are running a SQL ``order by random()`` query.
@@ -65,9 +65,9 @@ Hashed URL mode
When you open a database file in immutable mode using the ``-i`` option, Datasette calculates a SHA-256 hash of the contents of that file on startup. This content hash can then optionally be used to create URLs that are guaranteed to change if the contents of the file changes in the future. This results in URLs that can then be cached indefinitely by both browsers and caching proxies - an enormous potential performance optimization.
You can enable these hashed URLs in two ways: using the :ref:`config_hash_urls` configuration setting (which affects all requests to Datasette) or via the ``?_hash=1`` querystring parameter (which only applies to the current request).
You can enable these hashed URLs in two ways: using the :ref:`settings_hash_urls` configuration setting (which affects all requests to Datasette) or via the ``?_hash=1`` querystring parameter (which only applies to the current request).
With hashed URLs enabled, any request to e.g. ``/mydatabase/mytable`` will 302 redirect to ``mydatabase-455fe3a/mytable``. The URL containing the hash will be served with a very long cache expire header - configured using :ref:`config_default_cache_ttl_hashed` which defaults to 365 days.
With hashed URLs enabled, any request to e.g. ``/mydatabase/mytable`` will 302 redirect to ``mydatabase-455fe3a/mytable``. The URL containing the hash will be served with a very long cache expire header - configured using :ref:`settings_default_cache_ttl_hashed` which defaults to 365 days.
Since these responses are cached for a long time, you may wish to build API clients against the non-hashed version of these URLs. These 302 redirects are served extremely quickly, so this should still be a performant way to work against the Datasette API.
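
A rough sketch of these settings in action (it assumes a local ``fixtures.db`` file and a TTL of 60 seconds purely for illustration; header details may differ by version):

    import asyncio
    from datasette.app import Datasette

    async def demo():
        ds = Datasette(["fixtures.db"], config={"default_cache_ttl": 60})
        response = await ds.client.get("/fixtures.json")
        print(response.headers.get("cache-control"))  # expected to include max-age=60
        # Per-request override via ?_ttl=, as described above
        response = await ds.client.get("/fixtures.json?_ttl=0")
        print(response.headers.get("cache-control"))  # caching disabled for this request

    asyncio.run(demo())
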

View file

@@ -1020,7 +1020,7 @@ This example adds a new menu item but only if the signed in user is ``"root"``:
{"href": datasette.urls.path("/-/edit-schema"), "label": "Edit schema"},
]
Using :ref:`internals_datasette_urls` here ensures that links in the menu will take the :ref:`config_base_url` setting into account.
Using :ref:`internals_datasette_urls` here ensures that links in the menu will take the :ref:`setting_base_url` setting into account.
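
For context, the excerpt above is part of a ``menu_links()`` plugin hook implementation. A hedged reconstruction of the full hook (``/-/edit-schema`` is the documentation's illustrative path, not a built-in page):

    from datasette import hookimpl

    @hookimpl
    def menu_links(datasette, actor):
        # Only show the extra menu item to the signed-in root user
        if actor and actor.get("id") == "root":
            return [
                {"href": datasette.urls.path("/-/edit-schema"), "label": "Edit schema"},
            ]
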
.. _plugin_hook_table_actions:

View file

@@ -135,7 +135,7 @@ If you have docker installed (e.g. using `Docker for Mac <https://www.docker.com
Here's example output for the package command::
$ datasette package parlgov.db --extra-options="--config sql_time_limit_ms:2500"
$ datasette package parlgov.db --extra-options="--setting sql_time_limit_ms 2500"
Sending build context to Docker daemon 4.459MB
Step 1/7 : FROM python:3
---> 79e1dc9af1c1
@@ -154,7 +154,7 @@ Here's example output for the package command::
Step 6/7 : EXPOSE 8001
---> Using cache
---> 8e83844b0fed
Step 7/7 : CMD datasette serve parlgov.db --port 8001 --inspect-file inspect-data.json --config sql_time_limit_ms:2500
Step 7/7 : CMD datasette serve parlgov.db --port 8001 --inspect-file inspect-data.json --setting sql_time_limit_ms 2500
---> Using cache
---> 1bd380ea8af3
Successfully built 1bd380ea8af3

View file

@@ -1,20 +1,19 @@
.. _config:
.. _settings:
Configuration
=============
Settings
========
Using \-\-config
----------------
Using \-\-setting
-----------------
Datasette provides a number of configuration options. These can be set using the ``--config name:value`` option to ``datasette serve``.
Datasette supports a number of settings. These can be set using the ``--setting name value`` option to ``datasette serve``.
You can set multiple configuration options at once like this::
You can set multiple settings at once like this::
datasette mydatabase.db \
--config default_page_size:50 \
--config sql_time_limit_ms:3500 \
--config max_returned_rows:2000
--setting default_page_size 50 \
--setting sql_time_limit_ms 3500 \
--setting max_returned_rows 2000
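
The same overrides can be applied programmatically, which is handy in plugin tests; a minimal sketch (note the constructor keyword is still ``config=`` at this point in the rename):

    from datasette.app import Datasette

    ds = Datasette(
        ["mydatabase.db"],
        config={
            "default_page_size": 50,
            "sql_time_limit_ms": 3500,
            "max_returned_rows": 2000,
        },
    )
    print(ds.config("max_returned_rows"))  # 2000
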
.. _config_dir:
@@ -60,12 +59,16 @@ Settings
The following options can be set using ``--setting name value``, or by storing them in the ``settings.json`` file for use with :ref:`config_dir`.
.. _setting_default_page_size:
default_page_size
~~~~~~~~~~~~~~~~~
The default number of rows returned by the table page. You can over-ride this on a per-page basis using the ``?_size=80`` querystring parameter, provided you do not specify a value higher than the ``max_returned_rows`` setting. You can set this default using ``--config`` like so::
The default number of rows returned by the table page. You can over-ride this on a per-page basis using the ``?_size=80`` querystring parameter, provided you do not specify a value higher than the ``max_returned_rows`` setting. You can set this default using ``--setting`` like so::
datasette mydatabase.db --config default_page_size:50
datasette mydatabase.db --setting default_page_size 50
.. _setting_sql_time_limit_ms:
sql_time_limit_ms
~~~~~~~~~~~~~~~~~
@@ -74,7 +77,7 @@ By default, queries have a time limit of one second. If a query takes longer tha
If this time limit is too short for you, you can customize it using the ``sql_time_limit_ms`` limit - for example, to increase it to 3.5 seconds::
datasette mydatabase.db --config sql_time_limit_ms:3500
datasette mydatabase.db --setting sql_time_limit_ms 3500
You can optionally set a lower time limit for an individual query using the ``?_timelimit=100`` querystring argument::
@@ -82,7 +85,7 @@ You can optionally set a lower time limit for an individual query using the ``?_
This would set the time limit to 100ms for that specific query. This feature is useful if you are working with databases of unknown size and complexity - a query that might make perfect sense for a smaller table could take too long to execute on a table with millions of rows. By setting custom time limits you can execute queries "optimistically" - e.g. give me an exact count of rows matching this query but only if it takes less than 100ms to calculate.
.. _config_max_returned_rows:
.. _setting_max_returned_rows:
max_returned_rows
~~~~~~~~~~~~~~~~~
@@ -91,7 +94,9 @@ Datasette returns a maximum of 1,000 rows of data at a time. If you execute a qu
You can increase or decrease this limit like so::
datasette mydatabase.db --config max_returned_rows:2000
datasette mydatabase.db --setting max_returned_rows 2000
.. _setting_num_sql_threads:
num_sql_threads
~~~~~~~~~~~~~~~
@@ -100,7 +105,9 @@ Maximum number of threads in the thread pool Datasette uses to execute SQLite qu
::
datasette mydatabase.db --config num_sql_threads:10
datasette mydatabase.db --setting num_sql_threads 10
.. _setting_allow_facet:
allow_facet
~~~~~~~~~~~
@@ -111,21 +118,27 @@ This is enabled by default. If disabled, facets will still be displayed if they
Here's how to disable this feature::
datasette mydatabase.db --config allow_facet:off
datasette mydatabase.db --setting allow_facet off
.. _setting_default_facet_size:
default_facet_size
~~~~~~~~~~~~~~~~~~
The default number of unique rows returned by :ref:`facets` is 30. You can customize it like this::
datasette mydatabase.db --config default_facet_size:50
datasette mydatabase.db --setting default_facet_size 50
.. _setting_facet_time_limit_ms:
facet_time_limit_ms
~~~~~~~~~~~~~~~~~~~
This is the time limit Datasette allows for calculating a facet, which defaults to 200ms::
datasette mydatabase.db --config facet_time_limit_ms:1000
datasette mydatabase.db --setting facet_time_limit_ms 1000
.. _setting_facet_suggest_time_limit_ms:
facet_suggest_time_limit_ms
~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -134,23 +147,27 @@ When Datasette calculates suggested facets it needs to run a SQL query for every
You can increase this time limit like so::
datasette mydatabase.db --config facet_suggest_time_limit_ms:500
datasette mydatabase.db --setting facet_suggest_time_limit_ms 500
.. _setting_suggest_facets:
suggest_facets
~~~~~~~~~~~~~~
Should Datasette calculate suggested facets? On by default, turn this off like so::
datasette mydatabase.db --config suggest_facets:off
datasette mydatabase.db --setting suggest_facets off
.. _setting_allow_download:
allow_download
~~~~~~~~~~~~~~
Should users be able to download the original SQLite database using a link on the database index page? This is turned on by default - to disable database downloads, use the following::
datasette mydatabase.db --config allow_download:off
datasette mydatabase.db --setting allow_download off
.. _config_default_cache_ttl:
.. _setting_default_cache_ttl:
default_cache_ttl
~~~~~~~~~~~~~~~~~
@@ -159,19 +176,20 @@ Default HTTP caching max-age header in seconds, used for ``Cache-Control: max-ag
::
datasette mydatabase.db --config default_cache_ttl:60
datasette mydatabase.db --setting default_cache_ttl 60
.. _config_default_cache_ttl_hashed:
.. _setting_default_cache_ttl_hashed:
default_cache_ttl_hashed
~~~~~~~~~~~~~~~~~~~~~~~~
Default HTTP caching max-age for responses served using the :ref:`hashed-urls mechanism <config_hash_urls>`. Defaults to 365 days (31536000 seconds).
Default HTTP caching max-age for responses served using the :ref:`hashed-urls mechanism <setting_hash_urls>`. Defaults to 365 days (31536000 seconds).
::
datasette mydatabase.db --config default_cache_ttl_hashed:10000
datasette mydatabase.db --setting default_cache_ttl_hashed 10000
.. _setting_cache_size_kb:
cache_size_kb
~~~~~~~~~~~~~
@@ -180,9 +198,9 @@ Sets the amount of memory SQLite uses for its `per-connection cache <https://www
::
datasette mydatabase.db --config cache_size_kb:5000
datasette mydatabase.db --setting cache_size_kb 5000
.. _config_allow_csv_stream:
.. _setting_allow_csv_stream:
allow_csv_stream
~~~~~~~~~~~~~~~~
@@ -193,9 +211,9 @@ file. This is turned on by default - you can turn it off like this:
::
datasette mydatabase.db --config allow_csv_stream:off
datasette mydatabase.db --setting allow_csv_stream off
.. _config_max_csv_mb:
.. _setting_max_csv_mb:
max_csv_mb
~~~~~~~~~~
@@ -205,9 +223,9 @@ You can disable the limit entirely by setting this to 0:
::
datasette mydatabase.db --config max_csv_mb:0
datasette mydatabase.db --setting max_csv_mb 0
.. _config_truncate_cells_html:
.. _setting_truncate_cells_html:
truncate_cells_html
~~~~~~~~~~~~~~~~~~~
@@ -218,9 +236,9 @@ HTML page. Set this to 0 to disable truncation.
::
datasette mydatabase.db --config truncate_cells_html:0
datasette mydatabase.db --setting truncate_cells_html 0
.. _config_force_https_urls:
.. _setting_force_https_urls:
force_https_urls
~~~~~~~~~~~~~~~~
@@ -231,9 +249,9 @@ HTTP but is served to the outside world via a proxy that enables HTTPS.
::
datasette mydatabase.db --config force_https_urls:1
datasette mydatabase.db --setting force_https_urls 1
.. _config_hash_urls:
.. _setting_hash_urls:
hash_urls
~~~~~~~~~
@@ -247,9 +265,9 @@ itself will result in new, uncached URL paths.
::
datasette mydatabase.db --config hash_urls:1
datasette mydatabase.db --setting hash_urls 1
.. _config_template_debug:
.. _setting_template_debug:
template_debug
~~~~~~~~~~~~~~
@@ -258,7 +276,7 @@ This setting enables template context debug mode, which is useful to help unders
Enable it like this::
datasette mydatabase.db --config template_debug:1
datasette mydatabase.db --setting template_debug 1
Now you can add ``?_context=1`` or ``&_context=1`` to any Datasette page to see the context that was passed to that template.
@@ -268,7 +286,7 @@ Some examples:
* https://latest.datasette.io/fixtures?_context=1
* https://latest.datasette.io/fixtures/roadside_attractions?_context=1
.. _config_base_url:
.. _setting_base_url:
base_url
~~~~~~~~
@@ -279,9 +297,9 @@ For example, if you are sending traffic from ``https://www.example.com/tools/dat
You can do that like so::
datasette mydatabase.db --config base_url:/tools/datasette/
datasette mydatabase.db --setting base_url /tools/datasette/
.. _config_secret:
.. _setting_secret:
Configuring the secret
----------------------
@@ -308,7 +326,7 @@ One way to generate a secure random secret is to use Python like this::
Plugin authors make use of this signing mechanism in their plugins using :ref:`datasette_sign` and :ref:`datasette_unsign`.
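
A minimal sketch of that signing round trip (the namespace name is illustrative; Datasette falls back to an auto-generated secret here, whereas real deployments should set ``--secret`` or ``DATASETTE_SECRET`` as described above):

    from datasette.app import Datasette

    ds = Datasette([], memory=True)
    token = ds.sign({"id": "root"}, namespace="demo")
    print(ds.unsign(token, namespace="demo"))  # {'id': 'root'}
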
.. _config_publish_secrets:
.. _setting_publish_secrets:
Using secrets with datasette publish
------------------------------------

View file

@@ -164,7 +164,7 @@ To see a more interesting example, try ordering the records with the longest geo
datasette rivers-database.db \
--load-extension=/usr/local/lib/mod_spatialite.dylib \
--config sql_time_limit_ms:10000
--setting sql_time_limit_ms 10000
Now try the following query:

View file

@@ -240,7 +240,7 @@ Building URLs within plugins
Plugins that define their own custom user interface elements may need to link to other pages within Datasette.
This can be a bit tricky if the Datasette instance is using the :ref:`config_base_url` configuration setting to run behind a proxy, since that can cause Datasette's URLs to include an additional prefix.
This can be a bit tricky if the Datasette instance is using the :ref:`setting_base_url` configuration setting to run behind a proxy, since that can cause Datasette's URLs to include an additional prefix.
The ``datasette.urls`` object provides internal methods for correctly generating URLs to different pages within Datasette, taking any ``base_url`` configuration into account.

View file

@@ -24,13 +24,13 @@ def get_labels(filename):
@pytest.fixture(scope="session")
def config_headings():
return get_headings((docs_path / "config.rst").open().read(), "~")
def settings_headings():
return get_headings((docs_path / "settings.rst").open().read(), "~")
@pytest.mark.parametrize("config", app.CONFIG_OPTIONS)
def test_config_options_are_documented(config_headings, config):
assert config.name in config_headings
@pytest.mark.parametrize("setting", app.SETTINGS)
def test_settings_are_documented(settings_headings, setting):
assert setting.name in settings_headings
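
The ``get_headings()`` helper is defined earlier in this test module and does not appear in the diff; as an assumption about what it needs to return for this test to pass, one plausible sketch collects every heading underlined with the given character:

    import re

    def get_headings(content, underline="-"):
        # Headings are lines followed by a line made up of the underline character
        pattern = re.compile(
            r"^(?P<heading>\S.*)\n" + re.escape(underline) + r"+\n", re.MULTILINE
        )
        return {m.group("heading").strip() for m in pattern.finditer(content)}

    print(get_headings("default_page_size\n~~~~~~~~~~~~~~~~~\n\nSome text.\n", "~"))
    # {'default_page_size'}
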
@pytest.mark.parametrize(