2019-05-02 00:39:39 +00:00
|
|
|
import hashlib
|
2018-05-13 12:58:28 +00:00
|
|
|
import json
|
|
|
|
|
2018-05-13 12:44:22 +00:00
|
|
|
from sanic import response
|
2018-05-13 12:58:28 +00:00
|
|
|
|
2019-05-02 00:39:39 +00:00
|
|
|
from datasette.utils import (
|
|
|
|
CustomJSONEncoder,
|
|
|
|
InterruptedError,
|
|
|
|
detect_primary_keys,
|
|
|
|
detect_fts,
|
|
|
|
)
|
2018-05-13 12:44:22 +00:00
|
|
|
from datasette.version import __version__
|
2018-05-13 12:58:28 +00:00
|
|
|
|
|
|
|
from .base import HASH_LENGTH, RenderMixin
|
2018-05-13 12:44:22 +00:00
|
|
|
|
|
|
|
|
2019-05-16 03:02:33 +00:00
|
|
|
# Truncate table list on homepage at:
# (each database card shows at most this many tables + views combined)
TRUNCATE_AT = 5

# Only attempt counts if less than this many tables:
# (mutable databases over this limit skip row counts entirely; immutable
# databases are always counted)
COUNT_TABLE_LIMIT = 30
|
|
|
|
|
2019-05-16 00:28:07 +00:00
|
|
|
|
2018-05-13 12:44:22 +00:00
|
|
|
class IndexView(RenderMixin):
    """Renders the Datasette homepage: a summary card for every attached database.

    Responds to both HTML (index.html template) and JSON (``as_format`` set),
    listing each database's tables, views, row counts and hidden tables.
    """

    # Template/view name used by the RenderMixin machinery
    name = "index"

    def __init__(self, datasette):
        # The shared Datasette application object (databases, config, metadata)
        self.ds = datasette

    async def get(self, request, as_format):
        """Build the per-database summaries and render them.

        :param request: incoming HTTP request; ``?_sort=relationships`` switches
            the table ordering to foreign-key relationship counts
        :param as_format: truthy when a JSON representation was requested
        :return: a Sanic HTTPResponse (JSON) or rendered index.html template
        """
        databases = []
        for name, db in self.ds.databases.items():
            table_names = await db.table_names()
            hidden_table_names = set(await db.hidden_table_names())
            views = await db.view_names()
            # Perform counts only for immutable or DBs with <= COUNT_TABLE_LIMIT
            # tables - counting a large mutable database on every homepage hit
            # would be too slow.
            table_counts = {}
            if not db.is_mutable or len(table_names) <= COUNT_TABLE_LIMIT:
                table_counts = await db.table_counts(10)
                # If any of these are None it means at least one timed out -
                # ignore them all so we don't show a misleading partial set
                if any(v is None for v in table_counts.values()):
                    table_counts = {}
            tables = {}
            for table in table_names:
                table_columns = await db.table_columns(table)
                tables[table] = {
                    "name": table,
                    "columns": table_columns,
                    # Lambdas are awaited immediately, so `table` is bound to
                    # the current iteration's value - no late-binding issue.
                    "primary_keys": await self.ds.execute_against_connection_in_thread(
                        name, lambda conn: detect_primary_keys(conn, table)
                    ),
                    # None when counts were skipped or timed out
                    "count": table_counts.get(table),
                    "hidden": table in hidden_table_names,
                    "fts_table": await self.ds.execute_against_connection_in_thread(
                        name, lambda conn: detect_fts(conn, table)
                    ),
                    "num_relationships_for_sorting": 0,
                }

            if request.args.get("_sort") == "relationships" or not table_counts:
                # We will be sorting by number of relationships, so populate that field
                all_foreign_keys = await db.get_all_foreign_keys()
                for table, foreign_keys in all_foreign_keys.items():
                    count = len(foreign_keys["incoming"] + foreign_keys["outgoing"])
                    tables[table]["num_relationships_for_sorting"] = count

            hidden_tables = [t for t in tables.values() if t["hidden"]]
            visible_tables = [t for t in tables.values() if not t["hidden"]]

            # Most "interesting" tables first: relationship count, then row
            # count, then name. Previously this filtered with
            # `t not in hidden_tables` - an O(n^2) membership scan over a list
            # of dicts; `visible_tables` is exactly that set already.
            tables_and_views_truncated = list(
                sorted(
                    visible_tables,
                    key=lambda t: (
                        t["num_relationships_for_sorting"],
                        t["count"] or 0,
                        t["name"],
                    ),
                    reverse=True,
                )[:TRUNCATE_AT]
            )

            # Only add views if this is less than TRUNCATE_AT
            if len(tables_and_views_truncated) < TRUNCATE_AT:
                num_views_to_add = TRUNCATE_AT - len(tables_and_views_truncated)
                for view_name in views[:num_views_to_add]:
                    tables_and_views_truncated.append({"name": view_name})

            databases.append(
                {
                    "name": name,
                    "hash": db.hash,
                    # Hashed databases derive their colour from the content
                    # hash; others fall back to an MD5 of the database name
                    # (purely cosmetic, not security-sensitive).
                    "color": db.hash[:6]
                    if db.hash
                    else hashlib.md5(name.encode("utf8")).hexdigest()[:6],
                    "path": self.database_url(name),
                    "tables_and_views_truncated": tables_and_views_truncated,
                    # True when the card had to leave some tables/views out
                    "tables_and_views_more": (len(visible_tables) + len(views))
                    > TRUNCATE_AT,
                    "tables_count": len(visible_tables),
                    "table_rows_sum": sum((t["count"] or 0) for t in visible_tables),
                    "show_table_row_counts": bool(table_counts),
                    "hidden_table_rows_sum": sum(
                        t["count"] for t in hidden_tables if t["count"] is not None
                    ),
                    "hidden_tables_count": len(hidden_tables),
                    "views_count": len(views),
                }
            )

        databases.sort(key=lambda database: database["name"])

        if as_format:
            headers = {}
            if self.ds.cors:
                headers["Access-Control-Allow-Origin"] = "*"
            # JSON output is keyed by database name
            return response.HTTPResponse(
                json.dumps({db["name"]: db for db in databases}, cls=CustomJSONEncoder),
                content_type="application/json",
                headers=headers,
            )
        else:
            return self.render(
                ["index.html"],
                databases=databases,
                metadata=self.ds.metadata(),
                datasette_version=__version__,
            )
|