2017-10-27 07:08:24 +00:00
|
|
|
import click
|
2017-11-04 23:53:50 +00:00
|
|
|
from click_default_group import DefaultGroup
|
2017-11-11 20:10:51 +00:00
|
|
|
import json
|
2017-11-11 16:00:00 +00:00
|
|
|
import shutil
|
2017-11-15 19:53:00 +00:00
|
|
|
from subprocess import call, check_output
|
2017-11-11 16:00:00 +00:00
|
|
|
import sys
|
2017-11-11 20:10:51 +00:00
|
|
|
from .app import Datasette
|
2017-11-13 16:13:38 +00:00
|
|
|
from .utils import (
|
2017-11-15 19:53:00 +00:00
|
|
|
temporary_docker_directory, temporary_heroku_directory
|
2017-11-13 16:13:38 +00:00
|
|
|
)
|
2017-10-27 07:08:24 +00:00
|
|
|
|
2017-11-04 23:53:50 +00:00
|
|
|
|
|
|
|
# Root command group for the datasette CLI.
# DefaultGroup (with default='serve', default_if_no_args=True) makes
# "datasette data.db" behave like "datasette serve data.db", and makes a bare
# "datasette" invocation fall through to the 'serve' subcommand as well.
@click.group(cls=DefaultGroup, default='serve', default_if_no_args=True)
@click.version_option()
def cli():
    # NOTE: this docstring doubles as the help text shown by "datasette --help",
    # so its wording is user-visible output — do not change it casually.
    """
    Datasette!
    """
|
|
|
|
|
|
|
|
|
|
|
|
@cli.command()
@click.argument('files', type=click.Path(exists=True), nargs=-1)
@click.option('--inspect-file', default='inspect-data.json')
def build(files, inspect_file):
    """Inspect the given SQLite FILES and write the results to a JSON file.

    The output file (default: inspect-data.json) can later be passed to
    "datasette serve --inspect-file" to skip the inspection step at startup.
    """
    app = Datasette(files)
    # Use a context manager so the output file is closed even if inspection
    # or serialization raises (original leaked the handle via open().write()).
    with open(inspect_file, 'w') as fp:
        json.dump(app.inspect(), fp, indent=2)
|
2017-10-27 07:08:24 +00:00
|
|
|
|
|
|
|
|
2017-11-11 07:25:22 +00:00
|
|
|
@cli.command()
@click.argument('publisher', type=click.Choice(['now', 'heroku']))
@click.argument('files', type=click.Path(exists=True), nargs=-1)
@click.option(
    '-n', '--name', default='datasette',
    help='Application name to use when deploying to Now (ignored for Heroku)'
)
@click.option(
    '-m', '--metadata', type=click.File(mode='r'),
    help='Path to JSON file containing metadata to publish'
)
@click.option('--extra-options', help='Extra options to pass to datasette serve')
@click.option('--force', is_flag=True, help='Pass --force option to now')
@click.option('--branch', help='Install datasette from a GitHub branch e.g. master')
@click.option('--title', help='Title for metadata')
@click.option('--license', help='License label for metadata')
@click.option('--license_url', help='License URL for metadata')
@click.option('--source', help='Source label for metadata')
@click.option('--source_url', help='Source URL for metadata')
def publish(publisher, files, name, metadata, extra_options, force, branch, **extra_metadata):
    """
    Publish specified SQLite database files to the internet along with a datasette API.

    Options for PUBLISHER:

    * 'now' - You must have Zeit Now installed: https://zeit.co/now

    * 'heroku' - You must have Heroku installed: https://cli.heroku.com/

    Example usage: datasette publish now my-database.db
    """
    def _fail_if_publish_binary_not_installed(binary, publish_target, install_link):
        """Exit (with error message) if ``binary`` isn't installed"""
        if not shutil.which(binary):
            click.secho(
                f" Publishing to {publish_target} requires {binary} to be installed and configured ",
                bg='red',
                fg='white',
                bold=True,
                err=True
            )
            click.echo(f"Follow the instructions at {install_link}", err=True)
            sys.exit(1)

    if publisher == 'now':
        _fail_if_publish_binary_not_installed('now', 'Zeit Now', 'https://zeit.co/now')
        with temporary_docker_directory(files, name, metadata, extra_options, branch, extra_metadata):
            # Pass args as a list for consistency with the other subprocess
            # calls in this module (behavior is identical to call('now')).
            if force:
                call(['now', '--force'])
            else:
                call(['now'])

    elif publisher == 'heroku':
        _fail_if_publish_binary_not_installed('heroku', 'Heroku', 'https://cli.heroku.com')

        # Check for heroku-builds plugin.
        # check_output() returns bytes, so plugin names are compared as bytes.
        # Skip blank lines so line.split()[0] cannot raise IndexError.
        plugins = [
            line.split()[0]
            for line in check_output(['heroku', 'plugins']).splitlines()
            if line.strip()
        ]
        if b'heroku-builds' not in plugins:
            click.echo('Publishing to Heroku requires the heroku-builds plugin to be installed.')
            click.confirm('Install it? (this will run `heroku plugins:install heroku-builds`)', abort=True)
            call(["heroku", "plugins:install", "heroku-builds"])

        with temporary_heroku_directory(files, name, metadata, extra_options, branch, extra_metadata):
            # apps:create --json prints details of the new app; we need its
            # generated name to target the subsequent build.
            create_output = check_output(['heroku', 'apps:create', '--json'])
            app_name = json.loads(create_output)["name"]
            call(["heroku", "builds:create", "-a", app_name])
|
2017-11-13 16:13:38 +00:00
|
|
|
|
|
|
|
@cli.command()
@click.argument('files', type=click.Path(exists=True), nargs=-1, required=True)
@click.option(
    '-t', '--tag',
    help='Name for the resulting Docker container, can optionally use name:tag format'
)
@click.option(
    '-m', '--metadata', type=click.File(mode='r'),
    help='Path to JSON file containing metadata to publish'
)
@click.option('--extra-options', help='Extra options to pass to datasette serve')
@click.option('--branch', help='Install datasette from a GitHub branch e.g. master')
@click.option('--title', help='Title for metadata')
@click.option('--license', help='License label for metadata')
@click.option('--license_url', help='License URL for metadata')
@click.option('--source', help='Source label for metadata')
@click.option('--source_url', help='Source URL for metadata')
def package(files, tag, metadata, extra_options, branch, **extra_metadata):
    "Package specified SQLite files into a new datasette Docker container"
    # Bail out early when the docker binary cannot be found on PATH.
    if shutil.which('docker') is None:
        click.secho(
            ' The package command requires "docker" to be installed and configured ',
            bg='red',
            fg='white',
            bold=True,
            err=True,
        )
        sys.exit(1)
    # Build the image from a temporary directory containing a generated
    # Dockerfile plus the database files; the directory is cleaned up on exit.
    with temporary_docker_directory(files, 'datasette', metadata, extra_options, branch, extra_metadata):
        docker_args = ['docker', 'build']
        if tag:
            docker_args.extend(['-t', tag])
        docker_args.append('.')
        call(docker_args)
|
2017-11-11 07:25:22 +00:00
|
|
|
|
|
|
|
|
2017-10-27 07:08:24 +00:00
|
|
|
@cli.command()
@click.argument('files', type=click.Path(exists=True), nargs=-1)
@click.option('-h', '--host', default='127.0.0.1', help='host for server, defaults to 127.0.0.1')
@click.option('-p', '--port', default=8001, help='port for server, defaults to 8001')
@click.option('--debug', is_flag=True, help='Enable debug mode - useful for development')
@click.option('--reload', is_flag=True, help='Automatically reload if code change detected - useful for development')
@click.option('--cors', is_flag=True, help='Enable CORS by serving Access-Control-Allow-Origin: *')
@click.option('--page_size', default=100, help='Page size - default is 100')
@click.option('--max_returned_rows', default=1000, help='Max allowed rows to return at once - default is 1000. Set to 0 to disable check entirely.')
@click.option('--sql_time_limit_ms', default=1000, help='Max time allowed for SQL queries in ms')
@click.option(
    'sqlite_extensions', '--load-extension', envvar='SQLITE_EXTENSIONS', multiple=True,
    type=click.Path(exists=True, resolve_path=True), help='Path to a SQLite extension to load'
)
@click.option('--inspect-file', help='Path to JSON file created using "datasette build"')
@click.option('-m', '--metadata', type=click.File(mode='r'), help='Path to JSON file containing license/source metadata')
def serve(files, host, port, debug, reload, cors, page_size, max_returned_rows, sql_time_limit_ms, sqlite_extensions, inspect_file, metadata):
    """Serve up specified SQLite database files with a web UI"""
    if reload:
        # Imported lazily so hupper is only required when --reload is used.
        import hupper
        hupper.start_reloader('datasette.cli.serve')

    # Pre-computed inspection data (from "datasette build") lets the server
    # skip hashing/counting the databases at startup.
    inspect_data = None
    if inspect_file:
        # Use a context manager so the file is closed promptly (original
        # leaked the handle via json.load(open(...))).
        with open(inspect_file) as fp:
            inspect_data = json.load(fp)

    metadata_data = None
    if metadata:
        # `metadata` is an already-open file handle supplied by click.File.
        metadata_data = json.loads(metadata.read())

    click.echo('Serve! files={} on port {}'.format(files, port))
    ds = Datasette(
        files,
        # Caching headers are disabled in development modes so changes show up.
        cache_headers=not debug and not reload,
        cors=cors,
        page_size=page_size,
        max_returned_rows=max_returned_rows,
        sql_time_limit_ms=sql_time_limit_ms,
        inspect_data=inspect_data,
        metadata=metadata_data,
        sqlite_extensions=sqlite_extensions,
    )
    # Force initial hashing/table counting
    ds.inspect()
    ds.app().run(host=host, port=port, debug=debug)
|