kopia lustrzana https://github.com/simonw/datasette
De-duplicate 'datasette db.db db.db', closes #1632
Refs https://github.com/simonw/datasette-publish-fly/pull/12
Branch: parallel-queries
rodzic
03305ea183
commit
0cd982fc6a
|
@ -549,6 +549,9 @@ def serve(
|
|||
)
|
||||
)
|
||||
|
||||
# De-duplicate files so 'datasette db.db db.db' only attaches one /db
|
||||
files = list(dict.fromkeys(files))
|
||||
|
||||
try:
|
||||
ds = Datasette(files, **kwargs)
|
||||
except SpatialiteNotFound:
|
||||
|
|
|
@ -257,6 +257,7 @@ def test_serve_create(ensure_eventloop, tmpdir):
|
|||
|
||||
|
||||
def test_serve_duplicate_database_names(ensure_eventloop, tmpdir):
|
||||
"'datasette db.db nested/db.db' should attach two databases, /db and /db_2"
|
||||
runner = CliRunner()
|
||||
db_1_path = str(tmpdir / "db.db")
|
||||
nested = tmpdir / "nested"
|
||||
|
@ -270,6 +271,17 @@ def test_serve_duplicate_database_names(ensure_eventloop, tmpdir):
|
|||
assert {db["name"] for db in databases} == {"db", "db_2"}
|
||||
|
||||
|
||||
def test_serve_deduplicate_same_database_path(ensure_eventloop, tmpdir):
    """Passing the same file twice ('datasette db.db db.db') should attach only one database, /db."""
    cli_runner = CliRunner()
    path = str(tmpdir / "db.db")
    # "vacuum" against a fresh connection materializes an empty SQLite file on disk
    sqlite3.connect(path).execute("vacuum")
    # Invoke the CLI with the identical path twice; --get fetches the databases listing
    response = cli_runner.invoke(cli, [path, path, "--get", "/-/databases.json"])
    assert response.exit_code == 0, response.output
    # Only a single "db" entry should have been attached despite the duplicate argument
    names = {database["name"] for database in json.loads(response.output)}
    assert names == {"db"}
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"filename", ["test-database (1).sqlite", "database (1).sqlite"]
|
||||
)
|
||||
|
|
Ładowanie…
Reference in New Issue