Partial json_renderer refactor, part of #2136

json_renderer_refactor
Simon Willison 2023-08-09 11:52:51 -07:00
rodzic 8920d425f4
commit f3944608cc
5 zmienionych plików z 85 dodań i 74 usunięć

Wyświetl plik

@ -27,77 +27,88 @@ def convert_specific_columns_to_json(rows, columns, json_cols):
return new_rows
def json_renderer(args, data, error, truncated=None):
def json_renderer(args, rows, columns, internal_data, error, truncated=None):
"""Render a response as JSON"""
status_code = 200
# Handle the _json= parameter which may modify data["rows"]
# Turn rows into a list of lists
row_lists = [list(row) for row in rows]
row_dicts = None
# Handle the _json= parameter which may modify the rows
json_cols = []
if "_json" in args:
json_cols = args.getlist("_json")
if json_cols and "rows" in data and "columns" in data:
data["rows"] = convert_specific_columns_to_json(
data["rows"], data["columns"], json_cols
)
if json_cols:
row_lists = convert_specific_columns_to_json(row_lists, columns, json_cols)
# unless _json_infinity=1 requested, replace infinity with None
if "rows" in data and not value_as_boolean(args.get("_json_infinity", "0")):
data["rows"] = [remove_infinites(row) for row in data["rows"]]
if not value_as_boolean(args.get("_json_infinity", "0")):
row_lists = [remove_infinites(row) for row in row_lists]
nl = args.get("_nl", "")
if internal_data:
return_data = internal_data
else:
return_data = {"ok": True}
# Deal with the _shape option
shape = args.get("_shape", "objects")
# if there's an error, ignore the shape entirely
data["ok"] = True
if error:
shape = "objects"
status_code = 400
data["error"] = error
data["ok"] = False
return_data["ok"] = False
return_data["error"] = error
# return_data["rows"] is either lists or dicts
if shape in ("objects", "object", "array"):
row_dicts = [dict(zip(columns, row)) for row in row_lists]
return_data["rows"] = row_dicts
else:
return_data["rows"] = row_lists
if truncated is not None:
data["truncated"] = truncated
return_data["truncated"] = truncated
if shape == "arrayfirst":
if not data["rows"]:
data = []
elif isinstance(data["rows"][0], sqlite3.Row):
data = [row[0] for row in data["rows"]]
if shape == "objects":
pass
elif shape == "arrayfirst":
# Special case, return array as root object
return_data = [next(iter(row)) for row in row_lists]
elif shape == "object":
shape_error = None
if "primary_keys" not in data:
shape_error = "_shape=object is only available on tables"
else:
assert isinstance(data["rows"][0], dict)
data = [next(iter(row.values())) for row in data["rows"]]
elif shape in ("objects", "object", "array"):
columns = data.get("columns")
rows = data.get("rows")
if rows and columns:
data["rows"] = [dict(zip(columns, row)) for row in rows]
if shape == "object":
shape_error = None
if "primary_keys" not in data:
shape_error = "_shape=object is only available on tables"
pks = data["primary_keys"]
if not pks:
shape_error = (
"_shape=object not available for tables with no primary keys"
)
else:
pks = data["primary_keys"]
if not pks:
shape_error = (
"_shape=object not available for tables with no primary keys"
)
else:
object_rows = {}
for row in data["rows"]:
pk_string = path_from_row_pks(row, pks, not pks)
object_rows[pk_string] = row
data = object_rows
if shape_error:
data = {"ok": False, "error": shape_error}
elif shape == "array":
data = data["rows"]
object_row = {}
for row in return_data["rows"]:
pk_string = path_from_row_pks(row, pks, not pks)
object_row[pk_string] = row
return_data = object_row
if shape_error:
return_data = {"ok": False, "error": shape_error}
elif shape == "array":
# Return an array of objects
if nl:
body = "\n".join(
json.dumps(item, cls=CustomJSONEncoder) for item in row_dicts
)
content_type = "text/plain"
else:
body = json.dumps(row_dicts, cls=CustomJSONEncoder)
content_type = "application/json; charset=utf-8"
return Response(body, status=status_code, content_type=content_type)
elif shape == "arrays":
if not data["rows"]:
pass
elif isinstance(data["rows"][0], sqlite3.Row):
data["rows"] = [list(row) for row in data["rows"]]
else:
data["rows"] = [list(row.values()) for row in data["rows"]]
return_data["rows"] = row_lists
else:
status_code = 400
data = {
@ -106,15 +117,8 @@ def json_renderer(args, data, error, truncated=None):
"status": 400,
"title": None,
}
# Handle _nl option for _shape=array
nl = args.get("_nl", "")
if nl and shape == "array":
body = "\n".join(json.dumps(item, cls=CustomJSONEncoder) for item in data)
content_type = "text/plain"
else:
body = json.dumps(data, cls=CustomJSONEncoder)
content_type = "application/json; charset=utf-8"
headers = {}
return Response(
body, status=status_code, headers=headers, content_type=content_type
json.dumps(return_data, cls=CustomJSONEncoder),
status=status_code,
content_type="application/json; charset=utf-8",
)

Wyświetl plik

@ -548,7 +548,7 @@ class QueryView(View):
error=query_error,
# These will be deprecated in Datasette 1.0:
args=request.args,
data={"rows": rows, "columns": columns},
data={"ok": True, "rows": rows},
)
if asyncio.iscoroutine(result):
result = await result
@ -598,7 +598,7 @@ class QueryView(View):
it_can_render = call_with_supported_arguments(
can_render,
datasette=datasette,
columns=data.get("columns") or [],
columns=columns or [],
rows=data.get("rows") or [],
sql=data.get("query", {}).get("sql", None),
query_name=data.get("query_name"),

Wyświetl plik

@ -9,10 +9,10 @@ through the Datasette user interface can also be accessed as JSON via the API.
To access the API for a page, either click on the ``.json`` link on that page or
edit the URL and add a ``.json`` extension to it.
.. _json_api_shapes:
.. _json_api_default:
Different shapes
----------------
Default representation
----------------------
The default JSON representation of data from a SQLite table or custom query
looks like this:
@ -21,7 +21,6 @@ looks like this:
{
"ok": true,
"next": null,
"rows": [
{
"id": 3,
@ -39,13 +38,22 @@ looks like this:
"id": 1,
"name": "San Francisco"
}
]
],
"truncated": false
}
The ``rows`` key is a list of objects, each one representing a row. ``next`` indicates if
there is another page, and ``ok`` is always ``true`` if an error did not occur.
``"ok"`` is ``true`` provided no error occurred.
If ``next`` is present then the next page in the pagination set can be retrieved using ``?_next=VALUE``.
The ``"rows"`` key is a list of objects, each one representing a row.
The ``"truncated"`` key lets you know if the query was truncated. This can happen if a SQL query returns more rows than the :ref:`setting_max_returned_rows` setting allows (1,000 by default).
For table pages, an additional key ``"next"`` may be present. This indicates that the next page in the pagination set can be retrieved using ``?_next=VALUE``.
.. _json_api_shapes:
Different shapes
----------------
The ``_shape`` parameter can be used to access alternative formats for the
``rows`` key which may be more convenient for your application. There are three

Wyświetl plik

@ -649,7 +649,6 @@ async def test_custom_sql(ds_client):
{"content": "RENDER_CELL_DEMO"},
{"content": "RENDER_CELL_ASYNC"},
],
"columns": ["content"],
"ok": True,
"truncated": False,
}

Wyświetl plik

@ -35,10 +35,10 @@ def test_serve_with_get(tmp_path_factory):
],
)
assert 0 == result.exit_code, result.output
assert {
"truncated": False,
"columns": ["sqlite_version()"],
}.items() <= json.loads(result.output).items()
data = json.loads(result.output)
# Should have a single row with a single column
assert len(data["rows"]) == 1
assert list(data["rows"][0].keys()) == ["sqlite_version()"]
# The plugin should have created hello.txt
assert (plugins_dir / "hello.txt").read_text() == "hello"