diff --git a/datasette/app.py b/datasette/app.py
index b0b8f041..373b3e95 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -75,6 +75,7 @@ from .utils import (
     format_bytes,
     module_from_path,
     move_plugins,
+    move_table_config,
     parse_metadata,
     resolve_env_secrets,
     resolve_routes,
@@ -346,7 +347,9 @@ class Datasette:
-        # Move any "plugins" settings from metadata to config - updates them in place
+        # Move any "plugins" settings from metadata to config
         metadata = metadata or {}
         config = config or {}
-        move_plugins(metadata, config)
+        metadata, config = move_plugins(metadata, config)
+        # Now migrate any known table configuration settings over as well
+        metadata, config = move_table_config(metadata, config)

         self._metadata_local = metadata or {}
         self.sqlite_extensions = []
@@ -1202,10 +1205,11 @@ class Datasette:
     def _actor(self, request):
         return {"actor": request.actor}

-    async def table_config(self, database, table):
-        """Fetch table-specific metadata."""
+    async def table_config(self, database: str, table: str) -> dict:
+        """Return dictionary of configuration for specified table"""
         return (
-            (self.metadata("databases") or {})
+            (self.config or {})
+            .get("databases", {})
             .get(database, {})
             .get("tables", {})
             .get(table, {})
diff --git a/datasette/database.py b/datasette/database.py
index 1c1b3e1b..fba81496 100644
--- a/datasette/database.py
+++ b/datasette/database.py
@@ -487,13 +487,11 @@ class Database:
                 )
             ).rows
         ]
-        # Add any from metadata.json
-        db_metadata = self.ds.metadata(database=self.name)
-        if "tables" in db_metadata:
+        # Add any tables marked as hidden in config
+        db_config = self.ds.config.get("databases", {}).get(self.name, {})
+        if "tables" in db_config:
             hidden_tables += [
-                t
-                for t in db_metadata["tables"]
-                if db_metadata["tables"][t].get("hidden")
+                t for t in db_config["tables"] if db_config["tables"][t].get("hidden")
             ]
         # Also mark as hidden any tables which start with the name of a hidden table
         # e.g. "searchable_fts" implies "searchable_fts_content" should be hidden
diff --git a/datasette/utils/__init__.py b/datasette/utils/__init__.py
index 4c940645..fcaebe3f 100644
--- a/datasette/utils/__init__.py
+++ b/datasette/utils/__init__.py
@@ -2,6 +2,7 @@ import asyncio
 from contextlib import contextmanager
 import click
 from collections import OrderedDict, namedtuple, Counter
+import copy
 import base64
 import hashlib
 import inspect
@@ -17,7 +18,7 @@ import time
 import types
 import secrets
 import shutil
-from typing import Iterable
+from typing import Iterable, Tuple
 import urllib
 import yaml
 from .shutil_backport import copytree
@@ -1290,11 +1291,24 @@ def make_slot_function(name, datasette, request, **kwargs):
     return inner


-def move_plugins(source, destination):
+def prune_empty_dicts(d: dict):
+    """
+    Recursively prune all empty dictionaries from a given dictionary.
+    """
+    for key, value in list(d.items()):
+        if isinstance(value, dict):
+            prune_empty_dicts(value)
+            if value == {}:
+                d.pop(key, None)
+
+
+def move_plugins(source: dict, destination: dict) -> Tuple[dict, dict]:
     """
     Move 'plugins' keys from source to destination dictionary. Creates hierarchy in destination if needed.
     After moving, recursively remove any keys in the source that are left empty.
     """
+    source = copy.deepcopy(source)
+    destination = copy.deepcopy(destination)

     def recursive_move(src, dest, path=None):
         if path is None:
@@ -1316,18 +1330,49 @@
             if not value:
                 src.pop(key, None)

-    def prune_empty_dicts(d):
-        """
-        Recursively prune all empty dictionaries from a given dictionary.
-        """
-        for key, value in list(d.items()):
-            if isinstance(value, dict):
-                prune_empty_dicts(value)
-                if value == {}:
-                    d.pop(key, None)
-
     recursive_move(source, destination)
     prune_empty_dicts(source)
+    return source, destination
+
+
+_table_config_keys = (
+    "hidden",
+    "sort",
+    "sort_desc",
+    "size",
+    "sortable_columns",
+    "label_column",
+    "facets",
+    "fts_table",
+    "fts_pk",
+    "searchmode",
+    "units",
+)
+
+
+def move_table_config(metadata: dict, config: dict):
+    """
+    Move all known table configuration keys from metadata to config.
+    """
+    if "databases" not in metadata:
+        return metadata, config
+    metadata = copy.deepcopy(metadata)
+    config = copy.deepcopy(config)
+    for database_name, database in metadata["databases"].items():
+        if "tables" not in database:
+            continue
+        for table_name, table in database["tables"].items():
+            for key in _table_config_keys:
+                if key in table:
+                    config.setdefault("databases", {}).setdefault(
+                        database_name, {}
+                    ).setdefault("tables", {}).setdefault(table_name, {})[
+                        key
+                    ] = table.pop(
+                        key
+                    )
+    prune_empty_dicts(metadata)
+    return metadata, config


 def redact_keys(original: dict, key_patterns: Iterable) -> dict:
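Reviewer note: a minimal sketch of how the two helpers above compose, using a made-up legacy metadata dict (only `move_plugins`, `move_table_config`, `_table_config_keys` and the deep-copy behaviour are taken from the hunks above):

```python
from datasette.utils import move_plugins, move_table_config

# A legacy metadata.json payload mixing real metadata with table settings
metadata = {
    "title": "My site",
    "databases": {"fixtures": {"tables": {"facet_cities": {"sort": "name"}}}},
}
config = {}

# Both helpers now deep-copy and return new dicts instead of mutating in place
metadata, config = move_plugins(metadata, config)
metadata, config = move_table_config(metadata, config)

# "sort" is one of _table_config_keys, so it moved over to config...
assert config == {
    "databases": {"fixtures": {"tables": {"facet_cities": {"sort": "name"}}}}
}
# ...and prune_empty_dicts() stripped the emptied "databases" branch from metadata
assert metadata == {"title": "My site"}
```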
- """ - for key, value in list(d.items()): - if isinstance(value, dict): - prune_empty_dicts(value) - if value == {}: - d.pop(key, None) - recursive_move(source, destination) prune_empty_dicts(source) + return source, destination + + +_table_config_keys = ( + "hidden", + "sort", + "sort_desc", + "size", + "sortable_columns", + "label_column", + "facets", + "fts_table", + "fts_pk", + "searchmode", + "units", +) + + +def move_table_config(metadata: dict, config: dict): + """ + Move all known table configuration keys from metadata to config. + """ + if "databases" not in metadata: + return metadata, config + metadata = copy.deepcopy(metadata) + config = copy.deepcopy(config) + for database_name, database in metadata["databases"].items(): + if "tables" not in database: + continue + for table_name, table in database["tables"].items(): + for key in _table_config_keys: + if key in table: + config.setdefault("databases", {}).setdefault( + database_name, {} + ).setdefault("tables", {}).setdefault(table_name, {})[ + key + ] = table.pop( + key + ) + prune_empty_dicts(metadata) + return metadata, config def redact_keys(original: dict, key_patterns: Iterable) -> dict: diff --git a/datasette/views/table.py b/datasette/views/table.py index 2b5b7c24..50d2b3c2 100644 --- a/datasette/views/table.py +++ b/datasette/views/table.py @@ -142,11 +142,11 @@ async def display_columns_and_rows( """Returns columns, rows for specified table - including fancy foreign key treatment""" sortable_columns = sortable_columns or set() db = datasette.databases[database_name] - table_metadata = await datasette.table_config(database_name, table_name) - column_descriptions = table_metadata.get("columns") or {} + column_descriptions = datasette.metadata("columns", database_name, table_name) or {} column_details = { col.name: col for col in await db.table_column_details(table_name) } + table_config = await datasette.table_config(database_name, table_name) pks = await db.primary_keys(table_name) pks_for_display = pks if not pks_for_display: @@ -193,7 +193,6 @@ async def display_columns_and_rows( "raw": pk_path, "value": markupsafe.Markup( '{flat_pks}'.format( - base_url=base_url, table_path=datasette.urls.table(database_name, table_name), flat_pks=str(markupsafe.escape(pk_path)), flat_pks_quoted=path_from_row_pks(row, pks, not pks), @@ -274,9 +273,9 @@ async def display_columns_and_rows( ), ) ) - elif column in table_metadata.get("units", {}) and value != "": + elif column in table_config.get("units", {}) and value != "": # Interpret units using pint - value = value * ureg(table_metadata["units"][column]) + value = value * ureg(table_config["units"][column]) # Pint uses floating point which sometimes introduces errors in the compact # representation, which we have to round off to avoid ugliness. In the vast # majority of cases this rounding will be inconsequential. I hope. 
diff --git a/tests/test_api.py b/tests/test_api.py
index 0a1f3725..8cb73dbb 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -771,7 +771,7 @@ def test_databases_json(app_client_two_attached_databases_one_immutable):
 @pytest.mark.asyncio
 async def test_metadata_json(ds_client):
     response = await ds_client.get("/-/metadata.json")
-    assert response.json() == METADATA
+    assert response.json() == ds_client.ds.metadata()


 @pytest.mark.asyncio
@@ -1061,3 +1061,102 @@ async def test_config_json(config, expected):
     ds = Datasette(config=config)
     response = await ds.client.get("/-/config.json")
     assert response.json() == expected
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+    "metadata,expected_config,expected_metadata",
+    (
+        ({}, {}, {}),
+        (
+            # Metadata input
+            {
+                "title": "Datasette Fixtures",
+                "databases": {
+                    "fixtures": {
+                        "tables": {
+                            "sortable": {
+                                "sortable_columns": [
+                                    "sortable",
+                                    "sortable_with_nulls",
+                                    "sortable_with_nulls_2",
+                                    "text",
+                                ],
+                            },
+                            "no_primary_key": {"sortable_columns": [], "hidden": True},
+                            "units": {"units": {"distance": "m", "frequency": "Hz"}},
+                            "primary_key_multiple_columns_explicit_label": {
+                                "label_column": "content2"
+                            },
+                            "simple_view": {"sortable_columns": ["content"]},
+                            "searchable_view_configured_by_metadata": {
+                                "fts_table": "searchable_fts",
+                                "fts_pk": "pk",
+                            },
+                            "roadside_attractions": {
+                                "columns": {
+                                    "name": "The name of the attraction",
+                                    "address": "The street address for the attraction",
+                                }
+                            },
+                            "attraction_characteristic": {"sort_desc": "pk"},
+                            "facet_cities": {"sort": "name"},
+                            "paginated_view": {"size": 25},
+                        },
+                    }
+                },
+            },
+            # Should produce a config with just the table configuration keys
+            {
+                "databases": {
+                    "fixtures": {
+                        "tables": {
+                            "sortable": {
+                                "sortable_columns": [
+                                    "sortable",
+                                    "sortable_with_nulls",
+                                    "sortable_with_nulls_2",
+                                    "text",
+                                ]
+                            },
+                            "units": {"units": {"distance": "m", "frequency": "Hz"}},
+                            # These two get redacted because their names contain "key":
+                            "no_primary_key": "***",
+                            "primary_key_multiple_columns_explicit_label": "***",
+                            "simple_view": {"sortable_columns": ["content"]},
+                            "searchable_view_configured_by_metadata": {
+                                "fts_table": "searchable_fts",
+                                "fts_pk": "pk",
+                            },
+                            "attraction_characteristic": {"sort_desc": "pk"},
+                            "facet_cities": {"sort": "name"},
+                            "paginated_view": {"size": 25},
+                        }
+                    }
+                }
+            },
+            # And metadata with everything else
+            {
+                "title": "Datasette Fixtures",
+                "databases": {
+                    "fixtures": {
+                        "tables": {
+                            "roadside_attractions": {
+                                "columns": {
+                                    "name": "The name of the attraction",
+                                    "address": "The street address for the attraction",
+                                }
+                            },
+                        }
+                    }
+                },
+            },
+        ),
+    ),
+)
+async def test_upgrade_metadata(metadata, expected_config, expected_metadata):
+    ds = Datasette(metadata=metadata)
+    response = await ds.client.get("/-/config.json")
+    assert response.json() == expected_config
+    response2 = await ds.client.get("/-/metadata.json")
+    assert response2.json() == expected_metadata
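The `"***"` values in `expected_config` come from the redaction pass that `/-/config.json` applies: `redact_keys()` (its signature is visible at the end of the utils diff) replaces the value of any key whose name matches a sensitive substring pattern, and both redacted table names happen to contain `"key"`. A hedged sketch of that behaviour; the exact pattern tuple Datasette passes is an assumption here:

```python
from datasette.utils import redact_keys

tables = {
    "no_primary_key": {"hidden": True},  # name matches the "key" pattern
    "facet_cities": {"sort": "name"},  # no match, value passes through
}
redacted = redact_keys(tables, ("secret", "key", "password", "token"))
assert redacted == {
    "no_primary_key": "***",
    "facet_cities": {"sort": "name"},
}
```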
diff --git a/tests/test_html.py b/tests/test_html.py
index 86895844..8229b166 100644
--- a/tests/test_html.py
+++ b/tests/test_html.py
@@ -753,7 +753,7 @@ async def test_metadata_json_html(ds_client):
     response = await ds_client.get("/-/metadata")
     assert response.status_code == 200
     pre = Soup(response.content, "html.parser").find("pre")
-    assert METADATA == json.loads(pre.text)
+    assert ds_client.ds.metadata() == json.loads(pre.text)


 @pytest.mark.asyncio
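End to end, the migration means table settings can be passed straight to `config=` and read back via the new `table_config()` method from the `app.py` hunk. A rough self-contained sketch; `memory=True` and the unattached `fixtures` database name are illustrative only:

```python
import asyncio
from datasette.app import Datasette

ds = Datasette(
    memory=True,
    config={
        "databases": {
            "fixtures": {
                "tables": {
                    "facet_cities": {"sort": "name"},
                    "no_primary_key": {"hidden": True},
                }
            }
        }
    },
)

# table_config() now reads from config, not metadata
print(asyncio.run(ds.table_config("fixtures", "facet_cities")))  # {'sort': 'name'}
```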