Added --sql_time_limit_ms and --extra-options

The serve command now accepts --sql_time_limit_ms for customizing the SQL time
limit.
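
For example, to allow individual SQL queries to run for up to 2.5 seconds (the
database filename here is illustrative):

    datasette serve mydatabase.db --sql_time_limit_ms 2500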

The publish and package commands now accept --extra-options which can be used
to specify additional options to be passed to the datasette serve command when
it executes inside the resulting Docker containers.
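
For example, to publish a database to Zeit Now with a raised time limit (the
database filename is again illustrative):

    datasette publish now mydatabase.db --extra-options="--sql_time_limit_ms=2500"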
pull/94/head
Simon Willison 2017-11-13 13:58:34 -08:00
parent 452c5f047e
commit 1e698787a4
4 changed files with 67 additions and 15 deletions

README.md

@@ -92,6 +92,7 @@ http://localhost:8001/History/downloads.json will return that data as JSON in a
--page_size INTEGER Page size - default is 100
--max_returned_rows INTEGER Max allowed rows to return at once - default is
1000. Set to 0 to disable check entirely.
--sql_time_limit_ms INTEGER Max time allowed for SQL queries in ms
--inspect-file TEXT Path to JSON file created using "datasette
build"
-m, --metadata FILENAME Path to JSON file containing license/source
@@ -134,6 +135,7 @@ This will create a docker image containing both the datasette application and th
Options:
-n, --name TEXT Application name to use when deploying to Now
-m, --metadata FILENAME Path to JSON file containing metadata to publish
--extra-options TEXT Extra options to pass to datasette serve
--help Show this message and exit.
## datasette package
@@ -149,4 +151,43 @@ If you have docker installed you can use `datasette package` to create a new Doc
-t, --tag TEXT Name for the resulting Docker container, can
optionally use name:tag format
-m, --metadata FILENAME Path to JSON file containing metadata to publish
--extra-options TEXT Extra options to pass to datasette serve
--help Show this message and exit.
Both publish and package accept an `--extra-options` option, which will affect how the resulting application is executed. For example, say you want to increase the SQL time limit for a particular container:
datasette package parlgov.db --extra-options="--sql_time_limit_ms=2500 --page_size=10"
The resulting container will run the application with those options.
Here's example output for the package command:
$ datasette package parlgov.db --extra-options="--sql_time_limit_ms=2500 --page_size=10"
Sending build context to Docker daemon 4.459MB
Step 1/7 : FROM python:3
---> 79e1dc9af1c1
Step 2/7 : COPY . /app
---> Using cache
---> cd4ec67de656
Step 3/7 : WORKDIR /app
---> Using cache
---> 139699e91621
Step 4/7 : RUN pip install https://static.simonwillison.net/static/2017/datasette-0.9-py3-none-any.whl
---> Using cache
---> 340efa82bfd7
Step 5/7 : RUN datasette build parlgov.db --inspect-file inspect-data.json
---> Using cache
---> 5fddbe990314
Step 6/7 : EXPOSE 8001
---> Using cache
---> 8e83844b0fed
Step 7/7 : CMD datasette serve parlgov.db --port 8001 --inspect-file inspect-data.json --sql_time_limit_ms=2500 --page_size=10
---> Using cache
---> 1bd380ea8af3
Successfully built 1bd380ea8af3
You can now run the resulting container like so:
docker run -p 8081:8001 1bd380ea8af3
This exposes port 8001 inside the container as port 8081 on your host machine, so you can access the application at http://localhost:8081/

datasette/app.py

@@ -30,7 +30,6 @@ from .utils import (
app_root = Path(__file__).parent.parent
HASH_BLOCK_SIZE = 1024 * 1024
SQL_TIME_LIMIT_MS = 1000
connections = threading.local()
@@ -122,11 +121,10 @@ class BaseView(HTTPMethodView):
conn.text_factory = lambda x: str(x, 'utf-8', 'replace')
setattr(connections, db_name, conn)
with sqlite_timelimit(conn, SQL_TIME_LIMIT_MS):
with sqlite_timelimit(conn, self.ds.sql_time_limit_ms):
try:
cursor = conn.cursor()
cursor.execute(sql, params or {})
description = None
if self.max_returned_rows and truncate:
rows = cursor.fetchmany(self.max_returned_rows + 1)
truncated = len(rows) > self.max_returned_rows
@@ -510,7 +508,8 @@ class RowView(BaseView):
class Datasette:
def __init__(
self, files, num_threads=3, cache_headers=True, page_size=100,
max_returned_rows=1000, cors=False, inspect_data=None, metadata=None):
max_returned_rows=1000, sql_time_limit_ms=1000, cors=False,
inspect_data=None, metadata=None):
self.files = files
self.num_threads = num_threads
self.executor = futures.ThreadPoolExecutor(
@@ -519,6 +518,7 @@ class Datasette:
self.cache_headers = cache_headers
self.page_size = page_size
self.max_returned_rows = max_returned_rows
self.sql_time_limit_ms = sql_time_limit_ms
self.cors = cors
self._inspect = inspect_data
self.metadata = metadata or {}
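
For context, the sqlite_timelimit context manager used in the hunk above
enforces the limit through SQLite's progress handler, which can abort a
long-running query. A minimal sketch of that general approach (not
necessarily datasette's exact implementation):

    import time
    from contextlib import contextmanager

    @contextmanager
    def sqlite_timelimit(conn, ms):
        # SQLite invokes the handler every 10,000 virtual machine
        # instructions; returning a truthy value aborts the running query
        # with sqlite3.OperationalError: interrupted
        deadline = time.time() + ms / 1000
        def handler():
            if time.time() >= deadline:
                return 1
        conn.set_progress_handler(handler, 10000)
        try:
            yield
        finally:
            # Remove the handler once the block exits
            conn.set_progress_handler(None, 10000)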

datasette/cli.py

@@ -36,7 +36,8 @@ def build(files, inspect_file):
'-m', '--metadata', type=click.File(mode='r'),
help='Path to JSON file containing metadata to publish'
)
def publish(publisher, files, name, metadata):
@click.option('--extra-options', help='Extra options to pass to datasette serve')
def publish(publisher, files, name, metadata, extra_options):
"""
Publish specified SQLite database files to the internet along with a datasette API.
@@ -56,7 +57,7 @@ def publish(publisher, files, name, metadata):
click.echo('Follow the instructions at https://zeit.co/now#whats-now', err=True)
sys.exit(1)
with temporary_docker_directory(files, name, metadata):
with temporary_docker_directory(files, name, metadata, extra_options):
call('now')
@@ -70,7 +71,8 @@ def publish(publisher, files, name, metadata):
'-m', '--metadata', type=click.File(mode='r'),
help='Path to JSON file containing metadata to publish'
)
def package(files, tag, metadata):
@click.option('--extra-options', help='Extra options to pass to datasette serve')
def package(files, tag, metadata, extra_options):
"Package specified SQLite files into a new datasette Docker container"
if not shutil.which('docker'):
click.secho(
@@ -81,7 +83,7 @@ def package(files, tag, metadata):
err=True,
)
sys.exit(1)
with temporary_docker_directory(files, 'datasette', metadata):
with temporary_docker_directory(files, 'datasette', metadata, extra_options):
args = ['docker', 'build']
if tag:
args.append('-t')
@@ -99,9 +101,10 @@ def package(files, tag, metadata):
@click.option('--cors', is_flag=True, help='Enable CORS by serving Access-Control-Allow-Origin: *')
@click.option('--page_size', default=100, help='Page size - default is 100')
@click.option('--max_returned_rows', default=1000, help='Max allowed rows to return at once - default is 1000. Set to 0 to disable check entirely.')
@click.option('--sql_time_limit_ms', default=1000, help='Max time allowed for SQL queries in ms')
@click.option('--inspect-file', help='Path to JSON file created using "datasette build"')
@click.option('-m', '--metadata', type=click.File(mode='r'), help='Path to JSON file containing license/source metadata')
def serve(files, host, port, debug, reload, cors, page_size, max_returned_rows, inspect_file, metadata):
def serve(files, host, port, debug, reload, cors, page_size, max_returned_rows, sql_time_limit_ms, inspect_file, metadata):
"""Serve up specified SQLite database files with a web UI"""
if reload:
import hupper
@@ -122,6 +125,7 @@ def serve(files, host, port, debug, reload, cors, page_size, max_returned_rows,
cors=cors,
page_size=page_size,
max_returned_rows=max_returned_rows,
sql_time_limit_ms=sql_time_limit_ms,
inspect_data=inspect_data,
metadata=metadata_data,
)
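
With this wiring in place, the Datasette class can also be constructed
directly in Python. A minimal sketch, assuming the module path implied by the
diff and an illustrative database filename:

    from datasette.app import Datasette

    # Roughly equivalent to: datasette serve parlgov.db --sql_time_limit_ms 2500
    ds = Datasette(['parlgov.db'], sql_time_limit_ms=2500)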

datasette/utils.py

@@ -135,7 +135,15 @@ def escape_sqlite_table_name(s):
return '[{}]'.format(s)
def make_dockerfile(files, metadata_file):
def make_dockerfile(files, metadata_file, extra_options=''):
cmd = ['"datasette"', '"serve"']
cmd.append('"' + '", "'.join(files) + '"')
cmd.extend(['"--port"', '"8001"', '"--inspect-file"', '"inspect-data.json"'])
if metadata_file:
cmd.extend(['"--metadata"', '"{}"'.format(metadata_file)])
if extra_options:
for opt in extra_options.split():
cmd.append('"{}"'.format(opt))
return '''
FROM python:3
COPY . /app
@@ -143,15 +151,14 @@ WORKDIR /app
RUN pip install datasette
RUN datasette build {} --inspect-file inspect-data.json
EXPOSE 8001
CMD ["datasette", "serve", {}, "--port", "8001", "--cors", "--inspect-file", "inspect-data.json"{}]'''.format(
CMD [{}]'''.format(
' '.join(files),
'"' + '", "'.join(files) + '"',
metadata_file and ', "--metadata", "{}"'.format(metadata_file) or '',
', '.join(cmd)
).strip()
@contextmanager
def temporary_docker_directory(files, name, metadata):
def temporary_docker_directory(files, name, metadata, extra_options):
tmp = tempfile.TemporaryDirectory()
# We create a datasette folder in there to get a nicer now deploy name
datasette_dir = os.path.join(tmp.name, name)
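
Given the make_dockerfile implementation above, a call such as
make_dockerfile(['parlgov.db'], None, '--sql_time_limit_ms=2500 --page_size=10')
produces a Dockerfile whose final line is:

    CMD ["datasette", "serve", "parlgov.db", "--port", "8001", "--inspect-file", "inspect-data.json", "--sql_time_limit_ms=2500", "--page_size=10"]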
@@ -163,7 +170,7 @@ def temporary_docker_directory(files, name, metadata):
]
file_names = [os.path.split(f)[-1] for f in files]
try:
dockerfile = make_dockerfile(file_names, metadata and 'metadata.json')
dockerfile = make_dockerfile(file_names, metadata and 'metadata.json', extra_options)
os.chdir(datasette_dir)
open('Dockerfile', 'w').write(dockerfile)
if metadata: