2019-05-11 21:36:57 +00:00
|
|
|
import asyncio
|
2019-06-24 03:13:09 +00:00
|
|
|
import uvicorn
|
2017-10-27 07:08:24 +00:00
|
|
|
import click
|
2018-05-20 17:01:49 +00:00
|
|
|
from click import formatting
|
2020-11-24 20:01:47 +00:00
|
|
|
from click.types import CompositeParamType
|
2017-11-04 23:53:50 +00:00
|
|
|
from click_default_group import DefaultGroup
|
2023-01-07 23:56:03 +00:00
|
|
|
import functools
|
2017-11-11 20:10:51 +00:00
|
|
|
import json
|
2017-12-03 16:33:36 +00:00
|
|
|
import os
|
2020-04-27 16:30:24 +00:00
|
|
|
import pathlib
|
2022-12-13 04:18:42 +00:00
|
|
|
from runpy import run_module
|
2017-11-11 16:00:00 +00:00
|
|
|
import shutil
|
publish_subcommand hook + default plugins mechanism, used for publish heroku/now (#349)
This change introduces a new plugin hook, publish_subcommand, which can be
used to implement new subcommands for the "datasette publish" command family.
I've used this new hook to refactor out the "publish now" and "publish heroku"
implementations into separate modules. I've also added unit tests for these
two publishers, mocking the subprocess.call and subprocess.check_output
functions.
As part of this, I introduced a mechanism for loading default plugins. These
are defined in the new "default_plugins" list inside datasette/app.py
Closes #217 (Plugin support for datasette publish)
Closes #348 (Unit tests for "datasette publish")
Refs #14, #59, #102, #103, #146, #236, #347
2018-07-26 05:15:59 +00:00
|
|
|
from subprocess import call
|
2017-11-11 16:00:00 +00:00
|
|
|
import sys
|
2023-01-07 23:56:03 +00:00
|
|
|
import textwrap
|
2020-09-22 14:26:47 +00:00
|
|
|
import webbrowser
|
2022-03-19 00:19:31 +00:00
|
|
|
from .app import (
|
|
|
|
OBSOLETE_SETTINGS,
|
|
|
|
Datasette,
|
|
|
|
DEFAULT_SETTINGS,
|
|
|
|
SETTINGS,
|
|
|
|
SQLITE_LIMIT_ATTACHED,
|
|
|
|
pm,
|
|
|
|
)
|
2018-06-16 16:44:31 +00:00
|
|
|
from .utils import (
|
2022-08-23 18:34:30 +00:00
|
|
|
LoadExtension,
|
2020-11-24 20:37:29 +00:00
|
|
|
StartupError,
|
2020-02-15 17:56:48 +00:00
|
|
|
check_connection,
|
2020-11-29 20:13:16 +00:00
|
|
|
find_spatialite,
|
2020-04-02 19:30:53 +00:00
|
|
|
parse_metadata,
|
2020-02-15 17:56:48 +00:00
|
|
|
ConnectionProblem,
|
|
|
|
SpatialiteConnectionProblem,
|
2020-10-26 05:06:20 +00:00
|
|
|
initial_path_for_datasette,
|
2018-06-16 16:44:31 +00:00
|
|
|
temporary_docker_directory,
|
|
|
|
value_as_boolean,
|
2020-10-19 22:37:31 +00:00
|
|
|
SpatialiteNotFound,
|
publish_subcommand hook + default plugins mechanism, used for publish heroku/now (#349)
This change introduces a new plugin hook, publish_subcommand, which can be
used to implement new subcommands for the "datasette publish" command family.
I've used this new hook to refactor out the "publish now" and "publish heroku"
implementations into separate modules. I've also added unit tests for these
two publishers, mocking the subprocess.call and subprocess.check_output
functions.
As part of this, I introduced a mechanism for loading default plugins. These
are defined in the new "default_plugins" list inside datasette/app.py
Closes #217 (Plugin support for datasette publish)
Closes #348 (Unit tests for "datasette publish")
Refs #14, #59, #102, #103, #146, #236, #347
2018-07-26 05:15:59 +00:00
|
|
|
StaticMount,
|
2018-06-16 16:44:31 +00:00
|
|
|
ValueAsBooleanError,
|
|
|
|
)
|
2020-12-09 19:45:45 +00:00
|
|
|
from .utils.sqlite import sqlite3
|
2020-08-12 00:24:40 +00:00
|
|
|
from .utils.testing import TestClient
|
2020-10-29 03:38:15 +00:00
|
|
|
from .version import __version__
|
2017-10-27 07:08:24 +00:00
|
|
|
|
2021-08-03 16:12:48 +00:00
|
|
|
# Use Rich for tracebacks if it is installed.
# Rich is an optional dependency: when available, its traceback handler is
# installed globally so uncaught exceptions are rendered with syntax
# highlighting and local variable values (show_locals=True).
try:
    from rich.traceback import install

    install(show_locals=True)
except ImportError:
    # Rich is not installed - fall back to standard Python tracebacks
    pass
|
|
|
|
|
2017-11-04 23:53:50 +00:00
|
|
|
|
2018-05-20 17:01:49 +00:00
|
|
|
class Config(click.ParamType):
    """Click parameter type for the legacy --config name:value option.

    This will be removed in Datasette 1.0 in favour of class Setting.
    """

    name = "config"

    def convert(self, config, param, ctx):
        """Parse "name:value" into a (name, typed_value) tuple.

        The value is coerced to match the type of the setting's default;
        unknown or badly-typed settings abort with a click error.
        """
        if ":" not in config:
            self.fail(f'"{config}" should be name:value', param, ctx)
            return
        key, raw_value = config.split(":", 1)
        if key not in DEFAULT_SETTINGS:
            # Obsolete settings get a tailored message; everything else a generic one
            message = OBSOLETE_SETTINGS.get(key) or (
                f"{key} is not a valid option (--help-settings to see all)"
            )
            self.fail(message, param, ctx)
            return
        # Coerce the raw string to the type of the setting's default value
        default = DEFAULT_SETTINGS[key]
        if isinstance(default, bool):
            try:
                return key, value_as_boolean(raw_value)
            except ValueAsBooleanError:
                self.fail(f'"{key}" should be on/off/true/false/1/0', param, ctx)
                return
        if isinstance(default, int):
            if not raw_value.isdigit():
                self.fail(f'"{key}" should be an integer', param, ctx)
                return
            return key, int(raw_value)
        if isinstance(default, str):
            return key, raw_value
        # Should never happen:
        self.fail("Invalid option")
|
2018-05-18 05:08:26 +00:00
|
|
|
|
|
|
|
|
2020-11-24 20:01:47 +00:00
|
|
|
class Setting(CompositeParamType):
    """Click parameter type for --setting NAME VALUE (a two-token option)."""

    name = "setting"
    arity = 2

    def convert(self, config, param, ctx):
        """Validate a (name, value) pair, coercing value to the setting's type."""
        setting_name, setting_value = config
        if setting_name not in DEFAULT_SETTINGS:
            # Obsolete settings get a tailored message; everything else a generic one
            message = OBSOLETE_SETTINGS.get(setting_name) or (
                f"{setting_name} is not a valid option (--help-settings to see all)"
            )
            self.fail(message, param, ctx)
            return
        # Coerce the raw string to the type of the setting's default value
        default = DEFAULT_SETTINGS[setting_name]
        if isinstance(default, bool):
            try:
                return setting_name, value_as_boolean(setting_value)
            except ValueAsBooleanError:
                self.fail(
                    f'"{setting_name}" should be on/off/true/false/1/0', param, ctx
                )
                return
        if isinstance(default, int):
            if not setting_value.isdigit():
                self.fail(f'"{setting_name}" should be an integer', param, ctx)
                return
            return setting_name, int(setting_value)
        if isinstance(default, str):
            return setting_name, setting_value
        # Should never happen:
        self.fail("Invalid option")
|
|
|
|
|
|
|
|
|
2020-12-29 22:16:05 +00:00
|
|
|
def sqlite_extensions(fn):
    """Decorator: attach the --load-extension option to a click command.

    Also wraps the command so that the AttributeError raised when this
    Python build lacks sqlite3 extension support is reported as a clean
    click error instead of a traceback.
    """
    fn = click.option(
        "sqlite_extensions",
        "--load-extension",
        type=LoadExtension(),
        envvar="SQLITE_EXTENSIONS",
        multiple=True,
        help="Path to a SQLite extension to load, and optional entrypoint",
    )(fn)

    # Wrap it in a custom error handler
    @functools.wraps(fn)
    def wrapped(*args, **kwargs):
        try:
            return fn(*args, **kwargs)
        except AttributeError as e:
            # Only translate the specific "no extension support" failure;
            # any other AttributeError propagates unchanged.
            if "enable_load_extension" not in str(e):
                raise
            raise click.ClickException(
                textwrap.dedent(
                    """
                Your Python installation does not have the ability to load SQLite extensions.

                More information: https://datasette.io/help/extensions
                """
                ).strip()
            )

    return wrapped
|
2020-12-29 22:16:05 +00:00
|
|
|
|
|
|
|
|
2018-04-18 14:14:21 +00:00
|
|
|
# Top-level command group for the "datasette" CLI. DefaultGroup makes
# "serve" the implicit subcommand, so "datasette data.db" behaves like
# "datasette serve data.db".
@click.group(cls=DefaultGroup, default="serve", default_if_no_args=True)
@click.version_option(version=__version__)
def cli():
    """
    Datasette is an open source multi-tool for exploring and publishing data

    \b
    About Datasette: https://datasette.io/
    Full documentation: https://docs.datasette.io/
    """
|
|
|
|
|
|
|
|
|
|
|
|
@cli.command()
@click.argument("files", type=click.Path(exists=True), nargs=-1)
@click.option("--inspect-file", default="-")
@sqlite_extensions
def inspect(files, inspect_file, sqlite_extensions):
    """
    Generate JSON summary of provided database files

    This can then be passed to "datasette --inspect-file" to speed up count
    operations against immutable database files.
    """
    # Note: inspect_() constructs its own Datasette instance, so no instance
    # is created here (a previously-created one was never used).
    loop = asyncio.get_event_loop()
    inspect_data = loop.run_until_complete(inspect_(files, sqlite_extensions))
    if inspect_file == "-":
        # Default: write the JSON summary to stdout
        sys.stdout.write(json.dumps(inspect_data, indent=2))
    else:
        with open(inspect_file, "w") as fp:
            fp.write(json.dumps(inspect_data, indent=2))
|
2019-05-11 21:36:57 +00:00
|
|
|
|
|
|
|
|
|
|
|
async def inspect_(files, sqlite_extensions):
    """Build the inspect-data dictionary for the given immutable database files.

    Returns a dict keyed by database name with hash, size, file path and
    per-table row counts.
    """
    ds = Datasette([], immutables=files, sqlite_extensions=sqlite_extensions)
    results = {}
    for db_name, db in ds.databases.items():
        # Don't include the in-memory _internal database
        if db_name == "_internal":
            continue
        table_counts = await db.table_counts(limit=3600 * 1000)
        tables = {}
        for table_name, row_count in table_counts.items():
            tables[table_name] = {"count": row_count}
        results[db_name] = {
            "hash": db.hash,
            "size": db.size,
            "file": db.path,
            "tables": tables,
        }
    return results
|
2017-10-27 07:08:24 +00:00
|
|
|
|
|
|
|
|
2020-04-04 23:04:33 +00:00
|
|
|
@cli.group()
def publish():
    """Publish specified SQLite database files to the internet along with a Datasette-powered interface and API"""
    # Deliberately empty: concrete publish targets (e.g. "heroku") are
    # registered against this group by plugins via the publish_subcommand
    # plugin hook, not defined here.
    pass
|
2017-11-15 19:53:00 +00:00
|
|
|
|
2018-07-14 13:10:49 +00:00
|
|
|
|
publish_subcommand hook + default plugins mechanism, used for publish heroku/now (#349)
This change introduces a new plugin hook, publish_subcommand, which can be
used to implement new subcommands for the "datasette publish" command family.
I've used this new hook to refactor out the "publish now" and "publish heroku"
implementations into separate modules. I've also added unit tests for these
two publishers, mocking the subprocess.call and subprocess.check_output
functions.
As part of this, I introduced a mechanism for loading default plugins. These
are defined in the new "default_plugins" list inside datasette/app.py
Closes #217 (Plugin support for datasette publish)
Closes #348 (Unit tests for "datasette publish")
Refs #14, #59, #102, #103, #146, #236, #347
2018-07-26 05:15:59 +00:00
|
|
|
# Register publish plugins: fire the publish_subcommand plugin hook so that
# default and third-party plugins can attach subcommands to the "publish" group
pm.hook.publish_subcommand(publish=publish)
|
2017-11-13 16:13:38 +00:00
|
|
|
|
2017-11-22 17:42:29 +00:00
|
|
|
|
2019-01-26 20:01:16 +00:00
|
|
|
@cli.command()
@click.option("--all", help="Include built-in default plugins", is_flag=True)
@click.option(
    "--plugins-dir",
    type=click.Path(exists=True, file_okay=False, dir_okay=True),
    help="Path to directory containing custom plugins",
)
def plugins(all, plugins_dir):
    """List currently installed plugins"""
    # A throwaway Datasette instance is enough to interrogate the plugin registry
    ds = Datasette([], plugins_dir=plugins_dir)
    plugin_info = ds._plugins(all=all)
    click.echo(json.dumps(plugin_info, indent=4))
|
2019-01-26 20:01:16 +00:00
|
|
|
|
|
|
|
|
2017-11-13 16:13:38 +00:00
|
|
|
@cli.command()
@click.argument("files", type=click.Path(exists=True), nargs=-1, required=True)
@click.option(
    "-t",
    "--tag",
    help="Name for the resulting Docker container, can optionally use name:tag format",
)
@click.option(
    "-m",
    "--metadata",
    type=click.File(mode="r"),
    help="Path to JSON/YAML file containing metadata to publish",
)
@click.option("--extra-options", help="Extra options to pass to datasette serve")
@click.option("--branch", help="Install datasette from a GitHub branch e.g. main")
@click.option(
    "--template-dir",
    type=click.Path(exists=True, file_okay=False, dir_okay=True),
    help="Path to directory containing custom templates",
)
@click.option(
    "--plugins-dir",
    type=click.Path(exists=True, file_okay=False, dir_okay=True),
    help="Path to directory containing custom plugins",
)
@click.option(
    "--static",
    type=StaticMount(),
    help="Serve static files from this directory at /MOUNT/...",
    multiple=True,
)
@click.option(
    "--install", help="Additional packages (e.g. plugins) to install", multiple=True
)
@click.option("--spatialite", is_flag=True, help="Enable SpatialLite extension")
@click.option("--version-note", help="Additional note to show on /-/versions")
@click.option(
    "--secret",
    help="Secret used for signing secure values, such as signed cookies",
    envvar="DATASETTE_PUBLISH_SECRET",
    default=lambda: os.urandom(32).hex(),
)
@click.option(
    "-p",
    "--port",
    default=8001,
    type=click.IntRange(1, 65535),
    help="Port to run the server on, defaults to 8001",
)
@click.option("--title", help="Title for metadata")
@click.option("--license", help="License label for metadata")
@click.option("--license_url", help="License URL for metadata")
@click.option("--source", help="Source label for metadata")
@click.option("--source_url", help="Source URL for metadata")
@click.option("--about", help="About label for metadata")
@click.option("--about_url", help="About URL for metadata")
def package(
    files,
    tag,
    metadata,
    extra_options,
    branch,
    template_dir,
    plugins_dir,
    static,
    install,
    spatialite,
    version_note,
    secret,
    port,
    **extra_metadata,
):
    """Package SQLite files into a Datasette Docker container"""
    # Docker must be available on PATH before we attempt anything
    if not shutil.which("docker"):
        click.secho(
            ' The package command requires "docker" to be installed and configured ',
            bg="red",
            fg="white",
            bold=True,
            err=True,
        )
        sys.exit(1)
    # Build a temporary directory containing a Dockerfile plus the database
    # files, then run "docker build" inside it
    with temporary_docker_directory(
        files,
        "datasette",
        metadata=metadata,
        extra_options=extra_options,
        branch=branch,
        template_dir=template_dir,
        plugins_dir=plugins_dir,
        static=static,
        install=install,
        spatialite=spatialite,
        version_note=version_note,
        secret=secret,
        extra_metadata=extra_metadata,
        port=port,
    ):
        docker_args = ["docker", "build"]
        if tag:
            docker_args.extend(["-t", tag])
        docker_args.append(".")
        call(docker_args)
|
2017-11-11 07:25:22 +00:00
|
|
|
|
|
|
|
|
2020-08-11 22:31:47 +00:00
|
|
|
@cli.command()
@click.argument("packages", nargs=-1)
@click.option(
    "-U", "--upgrade", is_flag=True, help="Upgrade packages to latest version"
)
@click.option(
    "-r",
    "--requirement",
    type=click.Path(exists=True),
    help="Install from requirements file",
)
def install(packages, upgrade, requirement):
    """Install plugins and packages from PyPI into the same environment as Datasette"""
    if not packages and not requirement:
        raise click.UsageError("Please specify at least one package to install")
    # Invoke pip in-process so packages land in the same environment
    pip_args = ["pip", "install"]
    if upgrade:
        pip_args.append("--upgrade")
    if requirement:
        pip_args.extend(["-r", requirement])
    pip_args.extend(packages)
    sys.argv = pip_args
    run_module("pip", run_name="__main__")
|
2020-08-11 22:31:47 +00:00
|
|
|
|
|
|
|
|
|
|
|
@cli.command()
@click.argument("packages", nargs=-1, required=True)
@click.option("-y", "--yes", is_flag=True, help="Don't ask for confirmation")
def uninstall(packages, yes):
    """Uninstall plugins and Python packages from the Datasette environment"""
    # Invoke pip in-process so packages are removed from this same environment
    pip_args = ["pip", "uninstall"]
    pip_args.extend(packages)
    if yes:
        pip_args.append("-y")
    sys.argv = pip_args
    run_module("pip", run_name="__main__")
|
2020-08-11 22:31:47 +00:00
|
|
|
|
|
|
|
|
2017-10-27 07:08:24 +00:00
|
|
|
@cli.command()
@click.argument("files", type=click.Path(), nargs=-1)
@click.option(
    "-i",
    "--immutable",
    type=click.Path(exists=True),
    help="Database files to open in immutable mode",
    multiple=True,
)
@click.option(
    "-h",
    "--host",
    default="127.0.0.1",
    help=(
        "Host for server. Defaults to 127.0.0.1 which means only connections "
        "from the local machine will be allowed. Use 0.0.0.0 to listen to "
        "all IPs and allow access from other machines."
    ),
)
@click.option(
    "-p",
    "--port",
    default=8001,
    type=click.IntRange(0, 65535),
    help="Port for server, defaults to 8001. Use -p 0 to automatically assign an available port.",
)
@click.option(
    "--uds",
    help="Bind to a Unix domain socket",
)
@click.option(
    "--reload",
    is_flag=True,
    help="Automatically reload if code or metadata change detected - useful for development",
)
@click.option(
    "--cors", is_flag=True, help="Enable CORS by serving Access-Control-Allow-Origin: *"
)
@sqlite_extensions
@click.option(
    "--inspect-file", help='Path to JSON file created using "datasette inspect"'
)
@click.option(
    "-m",
    "--metadata",
    type=click.File(mode="r"),
    help="Path to JSON/YAML file containing license/source metadata",
)
@click.option(
    "--template-dir",
    type=click.Path(exists=True, file_okay=False, dir_okay=True),
    help="Path to directory containing custom templates",
)
@click.option(
    "--plugins-dir",
    type=click.Path(exists=True, file_okay=False, dir_okay=True),
    help="Path to directory containing custom plugins",
)
@click.option(
    "--static",
    type=StaticMount(),
    help="Serve static files from this directory at /MOUNT/...",
    multiple=True,
)
@click.option("--memory", is_flag=True, help="Make /_memory database available")
@click.option(
    "--config",
    type=Config(),
    help="Deprecated: set config option using configname:value. Use --setting instead.",
    multiple=True,
)
@click.option(
    "--setting",
    "settings",
    type=Setting(),
    help="Setting, see docs.datasette.io/en/stable/settings.html",
    multiple=True,
)
@click.option(
    "--secret",
    help="Secret used for signing secure values, such as signed cookies",
    envvar="DATASETTE_SECRET",
)
@click.option(
    "--root",
    help="Output URL that sets a cookie authenticating the root user",
    is_flag=True,
)
@click.option(
    "--get",
    help="Run an HTTP GET request against this path, print results and exit",
)
@click.option(
    "--token",
    help="API token to send with --get requests",
)
@click.option("--version-note", help="Additional note to show on /-/versions")
@click.option("--help-settings", is_flag=True, help="Show available settings")
@click.option("--pdb", is_flag=True, help="Launch debugger on any errors")
@click.option(
    "-o",
    "--open",
    "open_browser",
    is_flag=True,
    help="Open Datasette in your web browser",
)
@click.option(
    "--create",
    is_flag=True,
    help="Create database files if they do not exist",
)
@click.option(
    "--crossdb",
    is_flag=True,
    help="Enable cross-database joins using the /_memory database",
)
@click.option(
    "--nolock",
    is_flag=True,
    help="Ignore locking, open locked files in read-only mode",
)
@click.option(
    "--ssl-keyfile",
    help="SSL key file",
)
@click.option(
    "--ssl-certfile",
    help="SSL certificate file",
)
def serve(
    files,
    immutable,
    host,
    port,
    uds,
    reload,
    cors,
    sqlite_extensions,
    inspect_file,
    metadata,
    template_dir,
    plugins_dir,
    static,
    memory,
    config,
    settings,
    secret,
    root,
    get,
    token,
    version_note,
    help_settings,
    pdb,
    open_browser,
    create,
    crossdb,
    nolock,
    ssl_keyfile,
    ssl_certfile,
    return_instance=False,
):
    """Serve up specified SQLite database files with a web UI"""
    # --help-settings: print the settings table and exit without serving
    if help_settings:
        formatter = formatting.HelpFormatter()
        with formatter.section("Settings"):
            formatter.write_dl(
                [
                    (option.name, f"{option.help} (default={option.default})")
                    for option in SETTINGS
                ]
            )
        click.echo(formatter.getvalue())
        sys.exit(0)
    # --reload: restart the process when watched files change. hupper is
    # imported lazily so it is only required when --reload is used.
    if reload:
        import hupper

        reloader = hupper.start_reloader("datasette.cli.serve")
        if immutable:
            reloader.watch_files(immutable)
        if metadata:
            reloader.watch_files([metadata.name])

    # Pre-computed counts/hashes from "datasette inspect", if provided
    inspect_data = None
    if inspect_file:
        with open(inspect_file) as fp:
            inspect_data = json.load(fp)

    metadata_data = None
    if metadata:
        metadata_data = parse_metadata(metadata.read())

    # Merge deprecated --config pairs with --setting pairs; --setting wins
    # on conflict because it is applied last
    combined_settings = {}
    if config:
        click.echo(
            "--config name:value will be deprecated in Datasette 1.0, use --setting name value instead",
            err=True,
        )
        combined_settings.update(config)
    combined_settings.update(settings)

    kwargs = dict(
        immutables=immutable,
        cache_headers=not reload,
        cors=cors,
        inspect_data=inspect_data,
        metadata=metadata_data,
        sqlite_extensions=sqlite_extensions,
        template_dir=template_dir,
        plugins_dir=plugins_dir,
        static_mounts=static,
        settings=combined_settings,
        memory=memory,
        secret=secret,
        version_note=version_note,
        pdb=pdb,
        crossdb=crossdb,
        nolock=nolock,
    )

    # if files is a single directory, use that as config_dir=
    if 1 == len(files) and os.path.isdir(files[0]):
        kwargs["config_dir"] = pathlib.Path(files[0])
        files = []

    # Verify list of files, create if needed (and --create)
    for file in files:
        if not pathlib.Path(file).exists():
            if create:
                # "vacuum" forces SQLite to write a valid empty database file
                sqlite3.connect(file).execute("vacuum")
            else:
                raise click.ClickException(
                    "Invalid value for '[FILES]...': Path '{}' does not exist.".format(
                        file
                    )
                )

    # De-duplicate files so 'datasette db.db db.db' only attaches one /db
    files = list(dict.fromkeys(files))

    # Translate known startup failures into clean click errors
    try:
        ds = Datasette(files, **kwargs)
    except SpatialiteNotFound:
        raise click.ClickException("Could not find SpatiaLite extension")
    except StartupError as e:
        raise click.ClickException(e.args[0])

    if return_instance:
        # Private utility mechanism for writing unit tests
        return ds

    # Run the "startup" plugin hooks
    asyncio.get_event_loop().run_until_complete(ds.invoke_startup())

    # Run async soundness checks - but only if we're not under pytest
    asyncio.get_event_loop().run_until_complete(check_databases(ds))

    # --token only makes sense together with --get
    if token and not get:
        raise click.ClickException("--token can only be used with --get")

    # --get: execute one in-process request, print the body, and exit with
    # a status code reflecting the HTTP response - no server is started
    if get:
        client = TestClient(ds)
        headers = {}
        if token:
            headers["Authorization"] = "Bearer {}".format(token)
        response = client.get(get, headers=headers)
        click.echo(response.text)
        exit_code = 0 if response.status == 200 else 1
        sys.exit(exit_code)
        return

    # Start the server
    url = None
    if root:
        # Print a one-time URL that signs the visitor in as the root actor
        url = "http://{}:{}{}?token={}".format(
            host, port, ds.urls.path("-/auth-token"), ds._root_token
        )
        click.echo(url)
    if open_browser:
        if url is None:
            # Figure out most convenient URL - to table, database or homepage
            path = asyncio.get_event_loop().run_until_complete(
                initial_path_for_datasette(ds)
            )
            url = f"http://{host}:{port}{path}"
        webbrowser.open(url)
    uvicorn_kwargs = dict(
        host=host, port=port, log_level="info", lifespan="on", workers=1
    )
    if uds:
        uvicorn_kwargs["uds"] = uds
    if ssl_keyfile:
        uvicorn_kwargs["ssl_keyfile"] = ssl_keyfile
    if ssl_certfile:
        uvicorn_kwargs["ssl_certfile"] = ssl_certfile
    # Blocks until the server is shut down
    uvicorn.run(ds.app(), **uvicorn_kwargs)
|
2020-02-15 17:56:48 +00:00
|
|
|
|
|
|
|
|
2022-12-13 04:18:42 +00:00
|
|
|
@cli.command()
@click.argument("id")
@click.option(
    "--secret",
    help="Secret used for signing the API tokens",
    envvar="DATASETTE_SECRET",
    required=True,
)
@click.option(
    "-e",
    "--expires-after",
    help="Token should expire after this many seconds",
    type=int,
)
@click.option(
    "alls",
    "-a",
    "--all",
    type=str,
    metavar="ACTION",
    multiple=True,
    help="Restrict token to this action",
)
@click.option(
    "databases",
    "-d",
    "--database",
    type=(str, str),
    metavar="DB ACTION",
    multiple=True,
    help="Restrict token to this action on this database",
)
@click.option(
    "resources",
    "-r",
    "--resource",
    type=(str, str, str),
    metavar="DB RESOURCE ACTION",
    multiple=True,
    help="Restrict token to this action on this database resource (a table, SQL view or named query)",
)
@click.option(
    "--debug",
    help="Show decoded token",
    is_flag=True,
)
@click.option(
    "--plugins-dir",
    type=click.Path(exists=True, file_okay=False, dir_okay=True),
    help="Path to directory containing custom plugins",
)
def create_token(
    id, secret, expires_after, alls, databases, resources, debug, plugins_dir
):
    """
    Create a signed API token for the specified actor ID

    Example:

        datasette create-token root --secret mysecret

    To allow only "view-database-download" for all databases:

    \b
        datasette create-token root --secret mysecret \\
            --all view-database-download

    To allow "create-table" against a specific database:

    \b
        datasette create-token root --secret mysecret \\
            --database mydb create-table

    To allow "insert-row" against a specific table:

    \b
        datasette create-token root --secret mysecret \\
            --resource mydb mytable insert-row

    Restricted actions can be specified multiple times using
    multiple --all, --database, and --resource options.

    Add --debug to see a decoded version of the token.
    """
    # A Datasette instance is needed both to load plugins (which may register
    # extra permissions) and to sign the token with the provided secret.
    ds = Datasette(secret=secret, plugins_dir=plugins_dir)

    # Run ds.invoke_startup() in an event loop so plugin startup hooks fire
    loop = asyncio.get_event_loop()
    loop.run_until_complete(ds.invoke_startup())

    # Warn (but do not fail) about any actions not matching a known permission
    actions = list(alls)
    actions.extend(action for _, action in databases)
    actions.extend(action for _, _, action in resources)
    for action in actions:
        if not ds.permissions.get(action):
            click.secho(
                f"  Unknown permission: {action} ",
                fg="red",
                err=True,
            )

    # Build the restriction structures expected by ds.create_token():
    # {database: [actions]} and {database: {resource: [actions]}}
    restrict_database = {}
    for database, action in databases:
        restrict_database.setdefault(database, []).append(action)
    restrict_resource = {}
    for database, resource, action in resources:
        restrict_resource.setdefault(database, {}).setdefault(resource, []).append(
            action
        )

    token = ds.create_token(
        id,
        expires_after=expires_after,
        restrict_all=alls,
        restrict_database=restrict_database,
        restrict_resource=restrict_resource,
    )
    click.echo(token)
    if debug:
        # Tokens are prefixed "dstok_"; the remainder is the signed payload
        encoded = token[len("dstok_") :]
        click.echo("\nDecoded:\n")
        click.echo(json.dumps(ds.unsign(encoded, namespace="token"), indent=2))
|
2022-12-13 04:18:42 +00:00
|
|
|
|
|
|
|
|
2021-08-28 01:39:42 +00:00
|
|
|
# Give plugins a chance to register additional sub-commands on the
# datasette CLI via the register_commands plugin hook.
pm.hook.register_commands(cli=cli)
|
|
|
|
|
|
|
|
|
2020-02-15 17:56:48 +00:00
|
|
|
async def check_databases(ds):
    """
    Confirm that every connected database is usable.

    Runs check_connection against each database and raises click.UsageError
    with a helpful message if any of them fail. Also emits a warning when
    --crossdb is enabled with more databases than SQLite can attach.
    """
    for db in list(ds.databases.values()):
        try:
            await db.execute_fn(check_connection)
        except SpatialiteConnectionProblem:
            # Only suggest --load-extension if SpatiaLite is actually installed
            hint = ""
            try:
                find_spatialite()
            except SpatialiteNotFound:
                pass
            else:
                hint = "\n\nTry adding the --load-extension=spatialite option."
            message = (
                "It looks like you're trying to load a SpatiaLite"
                + " database without first loading the SpatiaLite module."
                + hint
                + "\n\nRead more: https://docs.datasette.io/en/stable/spatialite.html"
            )
            raise click.UsageError(message)
        except ConnectionProblem as e:
            raise click.UsageError(
                f"Connection to {db.path} failed check: {str(e.args[0])}"
            )
    # SQLite can only attach a limited number of databases; warn if --crossdb
    # was requested with more on-disk databases than that limit
    on_disk = sum(1 for db in ds.databases.values() if not db.is_memory)
    if ds.crossdb and on_disk > SQLITE_LIMIT_ATTACHED:
        msg = "Warning: --crossdb only works with the first {} attached databases".format(
            SQLITE_LIMIT_ATTACHED
        )
        click.echo(click.style(msg, bold=True, fg="yellow"), err=True)
|