pull/2481/merge
Thomas Chen 2025-10-01 22:55:08 +05:30 committed by GitHub
commit 39a960d08a
No key was found in the database for this signature
GPG key ID: B5690EEEBB952194
2 changed files with 217 additions and 35 deletions

View file

@@ -811,6 +811,31 @@ async def table_view_traced(datasette, request):
        context_for_html_hack=context_for_html_hack,
        default_labels=default_labels,
    )
    if isinstance(view_data, Response):
        return view_data
    data, rows, columns, expanded_columns, sql, next_url = view_data
    data.setdefault("_extras", {})
    extras = request.args.getlist("_extra")
    for extra in extras:
        fn = datasette.registry.get(f"extra_{extra}")
        if fn:
            try:
                data["_extras"][extra] = await fn(
                    datasette=datasette,
                    request=request,
                    database=resolved.db.name,
                    table=resolved.table,
                    view_name="table",
                    data=data,
                    display_columns=data.get("display_columns", []),
                )
            except Exception as e:
                data["_extras"][extra] = {"error": str(e)}
    if isinstance(view_data, Response):
        return view_data
    data, rows, columns, expanded_columns, sql, next_url = view_data
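
The loop added above follows a simple convention: every optional block is an async callable registered under an extra_<name> key, looked up per request from the ?_extra= values, and a failure in one block is recorded inside the payload instead of failing the whole response. A minimal standalone sketch of that dispatch pattern, using a plain dict in place of datasette.registry (the names and context here are illustrative, not Datasette's API):

import asyncio

# Hypothetical registry mapping "extra_<name>" to async callables
async def extra_columns(**context):
    return ["id", "name"]

async def extra_broken(**context):
    raise RuntimeError("simulated failure")

REGISTRY = {"extra_columns": extra_columns, "extra_broken": extra_broken}

async def apply_extras(requested, **context):
    data = {"_extras": {}}
    for name in requested:
        fn = REGISTRY.get(f"extra_{name}")
        if fn is None:
            continue  # unknown ?_extra= names are ignored
        try:
            data["_extras"][name] = await fn(**context)
        except Exception as e:
            # a failing extra degrades to an error entry, not a 500
            data["_extras"][name] = {"error": str(e)}
    return data

print(asyncio.run(apply_extras(["columns", "broken", "missing"])))
# {'_extras': {'columns': ['id', 'name'], 'broken': {'error': 'simulated failure'}}}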
@@ -1616,6 +1641,46 @@ async def table_view_data(
    async def extra_facets_timed_out(extra_facet_results):
        return extra_facet_results["timed_out"]

    async def extra_request():
        "Full information about the request"
        return {
            "url": request.url,
            "path": request.path,
            "full_path": request.full_path,
            "host": request.host,
            "args": request.args._data,
        }

    async def extra_extras():
        "Available ?_extra= blocks"
        all_extras = [
            (key[len("extra_"):], fn.__doc__)
            for key, fn in registry._registry.items()
            if key.startswith("extra_")
        ]
        return [
            {
                "name": name,
                "description": doc,
                "toggle_url": datasette.absolute_url(
                    request,
                    datasette.urls.path(
                        path_with_added_args(request, {"_extra": name})
                        if name not in extras
                        else path_with_removed_args(request, {"_extra": name})
                    ),
                ),
                "selected": name in extras,
            }
            for name, doc in all_extras
        ]
    bundles = {
        "html": [
            "suggested_facets",
@@ -1653,40 +1718,54 @@ async def table_view_data(
            extras.update(values)
            extras.discard(f"_{key}")

    registry = Registry(
        extra_count,
        extra_count_sql,
        extra_facet_results,
        extra_facets_timed_out,
        extra_suggested_facets,
        facet_instances,
        extra_human_description_en,
        extra_next_url,
        extra_columns,
        extra_primary_keys,
        run_display_columns_and_rows,
        extra_display_columns,
        extra_display_rows,
        extra_debug,
        extra_request,
        extra_query,
        extra_metadata,
        extra_extras,
        extra_database,
        extra_table,
        extra_database_color,
        extra_actions,
        extra_filters,
        extra_renderers,
        extra_custom_table_templates,
        extra_sorted_facet_results,
        extra_table_definition,
        extra_view_definition,
        extra_is_view,
        extra_private,
        extra_expandable_columns,
        extra_form_hidden_args,
    )
print("Starting registry build")
# Add these one at a time if needed
print("extra_debug:", callable(extra_debug))
print("extra_metadata:", callable(extra_metadata))
print("extra_actions:", callable(extra_actions))
print("Attempting to initialize registry in table.py")
try:
registry = Registry(
extra_count,
extra_count_sql,
extra_facet_results,
extra_facets_timed_out,
extra_suggested_facets,
facet_instances,
extra_human_description_en,
extra_next_url,
extra_columns,
extra_primary_keys,
run_display_columns_and_rows,
extra_display_columns,
extra_display_rows,
extra_debug,
extra_request,
extra_query,
extra_metadata,
extra_extras,
extra_database,
extra_table,
extra_database_color,
extra_actions,
extra_filters,
extra_renderers,
extra_custom_table_templates,
extra_sorted_facet_results,
extra_table_definition,
extra_view_definition,
extra_is_view,
extra_private,
extra_expandable_columns,
extra_form_hidden_args,
)
print("✅ registry initialized successfully")
except Exception as e:
print("❌ registry failed to initialize:", e)
raise
results = await registry.resolve_multi(
["extra_{}".format(extra) for extra in extras]
@@ -1810,3 +1889,4 @@ async def _next_value_and_url(
        request, datasette.urls.path(path_with_replaced_args(request, added_args))
    )
    return next_value, next_url

View file

@@ -1,4 +1,4 @@
from datasette.utils import detect_json1
from datasette.utils import detect_json1, encode_pk_component, decode_pk_component
from datasette.utils.sqlite import sqlite_version
from .fixtures import ( # noqa
app_client,
@@ -11,6 +11,7 @@ from .fixtures import ( # noqa
import json
import pytest
import urllib
import base64
@pytest.mark.asyncio
@@ -1382,3 +1383,104 @@ async def test_table_extras(ds_client, extra, expected_json):
    )
    assert response.status_code == 200
    assert response.json() == expected_json
@pytest.mark.asyncio
async def test_encode_decode_pk_component_binary(ds_client):
    db = ds_client.ds.databases["fixtures"]
    # Create a test table with a BLOB primary key
    await db.execute_write(
        """
        CREATE TABLE IF NOT EXISTS binary_pk_table (
            id BLOB PRIMARY KEY,
            value TEXT
        );
        """
    )
    # Insert a binary PK and retrieve it back using the API
    binary_pk = b"\x01\x02\x03test\n\xff"
    encoded_pk = encode_pk_component(binary_pk)
    decoded = decode_pk_component(encoded_pk)
    assert decoded == binary_pk, "Decoded value must match original binary data"
    # Insert into table
    await db.execute_write(
        "INSERT OR REPLACE INTO binary_pk_table (id, value) VALUES (?, ?)",
        [binary_pk, "test value"],
    )
    # Query it through the JSON API using the encoded PK
    path = f"/fixtures/binary_pk_table/{encoded_pk}.json"
    response = await ds_client.get(path)
    assert response.status_code == 200
    json_data = response.json()
    print("RECEIVED JSON:", json_data)
    # Adjust based on actual response shape
    try:
        assert json_data["rows"][0]["value"] == "test value"
    except (KeyError, IndexError, AssertionError):
        if "value" in json_data:
            assert json_data["value"] == "test value"
        elif "row" in json_data and "value" in json_data["row"]:
            assert json_data["row"]["value"] == "test value"
        else:
            raise AssertionError(
                "Could not find 'value' in expected locations of JSON response"
            )
    # Check that the encoded string is base64-safe
    assert isinstance(encoded_pk, str)
    b64_body = encoded_pk.replace("b64_", "")
    padding_needed = 4 - (len(b64_body) % 4)
    if padding_needed and padding_needed < 4:
        b64_body += "=" * padding_needed
    base64.urlsafe_b64decode(b64_body)
@pytest.mark.asyncio
async def test_encode_decode_pk_component_null_byte():
    binary_pk = b"\x00\x10binary\0key"
    encoded = encode_pk_component(binary_pk)
    decoded = decode_pk_component(encoded)
    assert decoded == binary_pk
    assert isinstance(encoded, str)

@pytest.mark.asyncio
async def test_encode_decode_pk_component_long_key():
    binary_pk = b"A" * 100
    encoded = encode_pk_component(binary_pk)
    decoded = decode_pk_component(encoded)
    assert decoded == binary_pk
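
encode_pk_component and decode_pk_component are imported from datasette.utils at the top of this file; judging by the b64_-prefix stripping and the urlsafe_b64decode check in test_encode_decode_pk_component_binary, they appear to round-trip arbitrary bytes through URL-safe base64 with a b64_ marker and no padding. A hedged sketch of that behaviour, written for illustration rather than copied from Datasette:

import base64

def encode_pk_component_sketch(value: bytes) -> str:
    # Illustrative: bytes -> "b64_" + URL-safe base64, padding stripped
    body = base64.urlsafe_b64encode(value).decode("ascii").rstrip("=")
    return "b64_" + body

def decode_pk_component_sketch(encoded: str) -> bytes:
    # Illustrative inverse: drop the prefix, restore padding, decode
    body = encoded[len("b64_"):]
    body += "=" * (-len(body) % 4)
    return base64.urlsafe_b64decode(body)

pk = b"\x01\x02\x03test\n\xff"
assert decode_pk_component_sketch(encode_pk_component_sketch(pk)) == pk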
@pytest.mark.asyncio
async def test_extra_columns_and_request(ds_client):
    await ds_client.ds.get_database("fixtures").execute_write(
        "CREATE TABLE IF NOT EXISTS demo (id INTEGER PRIMARY KEY, name TEXT)"
    )
    await ds_client.ds.get_database("fixtures").execute_write(
        "INSERT INTO demo (name) VALUES ('Alice')"
    )
    response = await ds_client.get("/fixtures/demo.json?_extra=columns,request")
    assert response.status_code == 200
    data = response.json()
    assert "columns" in data
    assert any(col["name"] == "id" for col in data["columns"])
    assert "request" in data
    assert "url" in data["request"]

@pytest.mark.asyncio
async def test_extra_metadata(ds_client):
    response = await ds_client.get("/fixtures/simple_primary_key.json?_extra=metadata")
    assert response.status_code == 200
    data = response.json()
    assert "metadata" in data
    assert "columns" in data["metadata"]

@pytest.mark.asyncio
async def test_extra_debug(ds_client):
    response = await ds_client.get("/fixtures/simple_primary_key.json?_extra=debug")
    assert response.status_code == 200
    data = response.json()
    assert "debug" in data
    assert "resolved" in data["debug"]

@pytest.mark.asyncio
async def test_extra_actions(ds_client):
    response = await ds_client.get("/fixtures/simple_primary_key.json?_extra=actions")
    assert response.status_code == 200
    assert "actions" in response.json()