Mirror of https://github.com/simonw/datasette

Rename _schemas to _internal, closes #1156

parent ebc7aa287c
commit dcdfb2c301
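This commit renames Datasette's hidden in-memory database from _schemas to _internal, and renames the supporting module and helpers to match: datasette.utils.schemas becomes datasette.utils.internal_db, and init_schemas() becomes init_internal_db(). Every reference to the _schemas name is updated in Datasette.__init__(), refresh_schemas(), get_database(), the inspect CLI command, the default view-database permission check, and the tests. The constructor is also simplified: the MEMORY sentinel object is dropped in favour of registering a plain ":memory:" database directly.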
@@ -74,7 +74,7 @@ from .utils.asgi import (
     asgi_send_json,
     asgi_send_redirect,
 )
-from .utils.schemas import init_schemas, populate_schema_tables
+from .utils.internal_db import init_internal_db, populate_schema_tables
 from .utils.sqlite import (
     sqlite3,
     using_pysqlite3,

@@ -85,8 +85,6 @@ from .version import __version__
 
 app_root = Path(__file__).parent.parent
 
-MEMORY = object()
-
 Setting = collections.namedtuple("Setting", ("name", "default", "help"))
 SETTINGS = (
     Setting("default_page_size", 100, "Default page size for the table view"),

@@ -218,24 +216,17 @@ class Datasette:
         ]
         self.inspect_data = inspect_data
         self.immutables = set(immutables or [])
-        if not self.files:
-            self.files = [MEMORY]
-        elif memory:
-            self.files = (MEMORY,) + self.files
         self.databases = collections.OrderedDict()
+        if memory or not self.files:
+            self.add_database(":memory:", Database(self, ":memory:", is_memory=True))
         # memory_name is a random string so that each Datasette instance gets its own
         # unique in-memory named database - otherwise unit tests can fail with weird
         # errors when different instances accidentally share an in-memory database
-        self.add_database("_schemas", Database(self, memory_name=secrets.token_hex()))
-        self._schemas_created = False
+        self.add_database("_internal", Database(self, memory_name=secrets.token_hex()))
+        self._interna_db_created = False
         for file in self.files:
             path = file
-            is_memory = False
-            if file is MEMORY:
-                path = None
-                is_memory = True
-            is_mutable = path not in self.immutables
-            db = Database(self, path, is_mutable=is_mutable, is_memory=is_memory)
+            db = Database(self, path, is_mutable=path not in self.immutables)
             if db.name in self.databases:
                 raise Exception(f"Multiple files with same stem: {db.name}")
             self.add_database(db.name, db)

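With the MEMORY sentinel gone, a memory-backed instance now registers a plain ":memory:" database up front. A minimal sketch of the resulting behaviour, assuming the constructor signature of this era (a list of files plus a memory flag):

    from datasette.app import Datasette

    # With no files, or with memory=True, a shared ":memory:" database is
    # registered directly; the hidden "_internal" database is always added too.
    ds = Datasette([], memory=True)
    print(list(ds.databases.keys()))  # expected: [':memory:', '_internal']
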
@@ -333,14 +324,14 @@ class Datasette:
         self.client = DatasetteClient(self)
 
     async def refresh_schemas(self):
-        schema_db = self.databases["_schemas"]
-        if not self._schemas_created:
-            await init_schemas(schema_db)
-            self._schemas_created = True
+        internal_db = self.databases["_internal"]
+        if not self._interna_db_created:
+            await init_internal_db(internal_db)
+            self._interna_db_created = True
 
         current_schema_versions = {
             row["database_name"]: row["schema_version"]
-            for row in await schema_db.execute(
+            for row in await internal_db.execute(
                 "select database_name, schema_version from databases"
             )
         }

@@ -349,7 +340,7 @@ class Datasette:
             # Compare schema versions to see if we should skip it
             if schema_version == current_schema_versions.get(database_name):
                 continue
-            await schema_db.execute_write(
+            await internal_db.execute_write(
                 """
                 INSERT OR REPLACE INTO databases (database_name, path, is_memory, schema_version)
                 VALUES (?, ?, ?, ?)

@@ -357,7 +348,7 @@ class Datasette:
                 [database_name, db.path, db.is_memory, schema_version],
                 block=True,
             )
-            await populate_schema_tables(schema_db, db)
+            await populate_schema_tables(internal_db, db)
 
     @property
     def urls(self):

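refresh_schemas() creates the _internal tables on first call, then records each attached database's schema_version so databases whose schema has not changed can be skipped on later calls. A minimal usage sketch, with names taken from the hunks above:

    import asyncio
    from datasette.app import Datasette

    async def main():
        ds = Datasette([], memory=True)
        await ds.refresh_schemas()
        internal_db = ds.databases["_internal"]
        # The databases table is written by the INSERT OR REPLACE above
        for row in await internal_db.execute(
            "select database_name, schema_version from databases"
        ):
            print(row["database_name"], row["schema_version"])

    asyncio.run(main())
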
@@ -376,7 +367,7 @@ class Datasette:
     def get_database(self, name=None):
         if name is None:
             # Return first no-_schemas database
-            name = [key for key in self.databases.keys() if key != "_schemas"][0]
+            name = [key for key in self.databases.keys() if key != "_internal"][0]
         return self.databases[name]
 
     def add_database(self, name, db):

@@ -625,7 +616,7 @@ class Datasette:
                 "hash": d.hash,
             }
             for name, d in sorted(self.databases.items(), key=lambda p: p[1].name)
-            if name != "_schemas"
+            if name != "_internal"
         ]
 
     def _versions(self):

@@ -134,8 +134,8 @@ async def inspect_(files, sqlite_extensions):
     app = Datasette([], immutables=files, sqlite_extensions=sqlite_extensions)
     data = {}
     for name, database in app.databases.items():
-        if name == "_schemas":
-            # Don't include the in-memory _schemas database
+        if name == "_internal":
+            # Don't include the in-memory _internal database
             continue
         counts = await database.table_counts(limit=3600 * 1000)
         data[name] = {

@@ -13,7 +13,7 @@ def permission_allowed(datasette, actor, action, resource):
         if allow is not None:
             return actor_matches_allow(actor, allow)
     elif action == "view-database":
-        if resource == "_schemas" and (actor is None or actor.get("id") != "root"):
+        if resource == "_internal" and (actor is None or actor.get("id") != "root"):
             return False
         database_allow = datasette.metadata("allow", database=resource)
         if database_allow is None:

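The default permission hook now hides the _internal database from everyone except the root actor. A standalone sketch of just that guard (a hypothetical helper that mirrors the condition above, not part of Datasette's API):

    def can_view_internal(actor):
        # Mirrors the view-database check: only the "root" actor may see _internal
        return actor is not None and actor.get("id") == "root"

    assert can_view_internal(None) is False
    assert can_view_internal({"id": "root"}) is True
    assert can_view_internal({"id": "alice"}) is False
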
@@ -1024,7 +1024,7 @@ def find_spatialite():
 
 async def initial_path_for_datasette(datasette):
     "Return suggested path for opening this Datasette, based on number of DBs and tables"
-    databases = dict([p for p in datasette.databases.items() if p[0] != "_schemas"])
+    databases = dict([p for p in datasette.databases.items() if p[0] != "_internal"])
     if len(databases) == 1:
         db_name = next(iter(databases.keys()))
         path = datasette.urls.database(db_name)

@@ -1,4 +1,4 @@
-async def init_schemas(db):
+async def init_internal_db(db):
     await db.execute_write(
         """
     CREATE TABLE databases (

@@ -73,15 +73,15 @@ async def init_schemas(db):
     )
 
 
-async def populate_schema_tables(schema_db, db):
+async def populate_schema_tables(internal_db, db):
     database_name = db.name
-    await schema_db.execute_write(
+    await internal_db.execute_write(
         "delete from tables where database_name = ?", [database_name], block=True
     )
     tables = (await db.execute("select * from sqlite_master where type = 'table'")).rows
     for table in tables:
         table_name = table["name"]
-        await schema_db.execute_write(
+        await internal_db.execute_write(
             """
             insert into tables (database_name, table_name, rootpage, sql)
             values (?, ?, ?, ?)

@@ -90,7 +90,7 @@ async def populate_schema_tables(schema_db, db):
             block=True,
         )
         # And the columns
-        await schema_db.execute_write(
+        await internal_db.execute_write(
             "delete from columns where database_name = ? and table_name = ?",
             [database_name, table_name],
             block=True,

@@ -101,7 +101,7 @@ async def populate_schema_tables(schema_db, db):
                 **{"database_name": database_name, "table_name": table_name},
                 **column._asdict(),
             }
-            await schema_db.execute_write(
+            await internal_db.execute_write(
                 """
                 insert into columns (
                     database_name, table_name, cid, name, type, "notnull", default_value, is_pk, hidden

@@ -113,7 +113,7 @@ async def populate_schema_tables(schema_db, db):
                 block=True,
             )
         # And the foreign_keys
-        await schema_db.execute_write(
+        await internal_db.execute_write(
             "delete from foreign_keys where database_name = ? and table_name = ?",
             [database_name, table_name],
             block=True,

@@ -126,7 +126,7 @@ async def populate_schema_tables(schema_db, db):
                 **{"database_name": database_name, "table_name": table_name},
                 **dict(foreign_key),
             }
-            await schema_db.execute_write(
+            await internal_db.execute_write(
                 """
                 insert into foreign_keys (
                     database_name, table_name, "id", seq, "table", "from", "to", on_update, on_delete, match

@@ -138,7 +138,7 @@ async def populate_schema_tables(schema_db, db):
                 block=True,
             )
         # And the indexes
-        await schema_db.execute_write(
+        await internal_db.execute_write(
             "delete from indexes where database_name = ? and table_name = ?",
             [database_name, table_name],
             block=True,

@@ -149,7 +149,7 @@ async def populate_schema_tables(schema_db, db):
                 **{"database_name": database_name, "table_name": table_name},
                 **dict(index),
             }
-            await schema_db.execute_write(
+            await internal_db.execute_write(
                 """
                 insert into indexes (
                     database_name, table_name, seq, name, "unique", origin, partial

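populate_schema_tables() rewrites the tables, columns, foreign_keys and indexes catalog rows for one attached database. A sketch of reading that catalog back, assuming a hypothetical data.db file on disk (column names come from the insert statements above):

    import asyncio
    from datasette.app import Datasette

    async def main():
        ds = Datasette(["data.db"])  # hypothetical SQLite file
        await ds.refresh_schemas()   # fills _internal via populate_schema_tables()
        internal_db = ds.databases["_internal"]
        columns = await internal_db.execute(
            "select table_name, name, type from columns where database_name = ?",
            ["data"],  # database name is the file stem
        )
        for row in columns:
            print(row["table_name"], row["name"], row["type"])

    asyncio.run(main())
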
@@ -2,36 +2,36 @@ from .fixtures import app_client
 import pytest
 
 
-def test_schemas_only_available_to_root(app_client):
+def test_internal_only_available_to_root(app_client):
     cookie = app_client.actor_cookie({"id": "root"})
-    assert app_client.get("/_schemas").status == 403
-    assert app_client.get("/_schemas", cookies={"ds_actor": cookie}).status == 200
+    assert app_client.get("/_internal").status == 403
+    assert app_client.get("/_internal", cookies={"ds_actor": cookie}).status == 200
 
 
-def test_schemas_databases(app_client):
+def test_internal_databases(app_client):
     cookie = app_client.actor_cookie({"id": "root"})
     databases = app_client.get(
-        "/_schemas/databases.json?_shape=array", cookies={"ds_actor": cookie}
+        "/_internal/databases.json?_shape=array", cookies={"ds_actor": cookie}
     ).json
     assert len(databases) == 2
-    assert databases[0]["database_name"] == "_schemas"
+    assert databases[0]["database_name"] == "_internal"
     assert databases[1]["database_name"] == "fixtures"
 
 
-def test_schemas_tables(app_client):
+def test_internal_tables(app_client):
     cookie = app_client.actor_cookie({"id": "root"})
     tables = app_client.get(
-        "/_schemas/tables.json?_shape=array", cookies={"ds_actor": cookie}
+        "/_internal/tables.json?_shape=array", cookies={"ds_actor": cookie}
     ).json
     assert len(tables) > 5
     table = tables[0]
     assert set(table.keys()) == {"rootpage", "table_name", "database_name", "sql"}
 
 
-def test_schemas_indexes(app_client):
+def test_internal_indexes(app_client):
     cookie = app_client.actor_cookie({"id": "root"})
     indexes = app_client.get(
-        "/_schemas/indexes.json?_shape=array", cookies={"ds_actor": cookie}
+        "/_internal/indexes.json?_shape=array", cookies={"ds_actor": cookie}
    ).json
     assert len(indexes) > 5
     index = indexes[0]

@@ -46,10 +46,10 @@ def test_schemas_indexes(app_client):
     }
 
 
-def test_schemas_foreign_keys(app_client):
+def test_internal_foreign_keys(app_client):
     cookie = app_client.actor_cookie({"id": "root"})
     foreign_keys = app_client.get(
-        "/_schemas/foreign_keys.json?_shape=array", cookies={"ds_actor": cookie}
+        "/_internal/foreign_keys.json?_shape=array", cookies={"ds_actor": cookie}
     ).json
     assert len(foreign_keys) > 5
     foreign_key = foreign_keys[0]

@@ -293,7 +293,7 @@ def test_hook_extra_body_script(app_client, path, expected_extra_body_script):
 
 def test_hook_asgi_wrapper(app_client):
     response = app_client.get("/fixtures")
-    assert "_schemas, fixtures" == response.headers["x-databases"]
+    assert "_internal, fixtures" == response.headers["x-databases"]
 
 
 def test_hook_extra_template_vars(restore_working_directory):