Mirror of https://github.com/simonw/datasette
package and publish commands now accept --static and --template-dir
Example usage:

    datasette package --static css:extra-css/ --static js:extra-js/ \
        sf-trees.db --template-dir templates/ --tag sf-trees --branch master

This creates a local Docker image that includes copies of the templates/, extra-css/ and extra-js/ directories. You can then run it like this:

    docker run -p 8001:8001 sf-trees

For publishing to Zeit Now:

    datasette publish now --static css:extra-css/ --static js:extra-js/ \
        sf-trees.db --template-dir templates/ --name sf-trees --branch master

Example: https://sf-trees-wbihszoazc.now.sh/sf-trees-02c8ef1/Street_Tree_List

For publishing to Heroku:

    datasette publish heroku --static css:extra-css/ --static js:extra-js/ \
        sf-trees.db --template-dir templates/ --branch master

Closes #157, #160
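For reference, the example commands above assume a working directory laid out roughly like this (an illustration, not part of the commit):

    sf-trees.db
    templates/      custom templates, passed with --template-dir templates/
    extra-css/      mounted at /css via --static css:extra-css/
    extra-js/       mounted at /js  via --static js:extra-js/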
parent 2cc14a236c
commit c195ee4d46
@@ -11,6 +11,18 @@ from .utils import (
 )
 
 
+class StaticMount(click.ParamType):
+    name = 'static mount'
+
+    def convert(self, value, param, ctx):
+        if ':' not in value:
+            self.fail('"%s" should be of format mountpoint:directory' % value, param, ctx)
+        path, dirpath = value.split(':')
+        if not os.path.exists(dirpath) or not os.path.isdir(dirpath):
+            self.fail('%s is not a valid directory path' % value, param, ctx)
+        return path, dirpath
+
+
 @click.group(cls=DefaultGroup, default='serve', default_if_no_args=True)
 @click.version_option()
 def cli():
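As a quick illustration (not part of the diff): StaticMount turns each --static value into a (mountpoint, directory) pair, and because the option is declared with multiple=True the publish and package commands receive a tuple of such pairs. The variable names below are illustrative only.

# Sketch of the value shapes produced by the new parameter type.
value = 'css:extra-css/'
mount_point, dirpath = value.split(':')
assert (mount_point, dirpath) == ('css', 'extra-css/')

# With --static css:extra-css/ --static js:extra-js/ the command function's
# `static` argument looks like:
static = (('css', 'extra-css/'), ('js', 'extra-js/'))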
@@ -45,12 +57,14 @@ def inspect(files, inspect_file, sqlite_extensions):
 @click.option('--extra-options', help='Extra options to pass to datasette serve')
 @click.option('--force', is_flag=True, help='Pass --force option to now')
 @click.option('--branch', help='Install datasette from a GitHub branch e.g. master')
+@click.option('--template-dir', type=click.Path(exists=True, file_okay=False, dir_okay=True), help='Path to directory containing custom templates')
+@click.option('--static', type=StaticMount(), help='mountpoint:path-to-directory for serving static files', multiple=True)
 @click.option('--title', help='Title for metadata')
 @click.option('--license', help='License label for metadata')
 @click.option('--license_url', help='License URL for metadata')
 @click.option('--source', help='Source label for metadata')
 @click.option('--source_url', help='Source URL for metadata')
-def publish(publisher, files, name, metadata, extra_options, force, branch, **extra_metadata):
+def publish(publisher, files, name, metadata, extra_options, force, branch, template_dir, static, **extra_metadata):
     """
     Publish specified SQLite database files to the internet along with a datasette API.
@@ -80,7 +94,7 @@ def publish(publisher, files, name, metadata, extra_options, force, branch, **extra_metadata):
 
     if publisher == 'now':
         _fail_if_publish_binary_not_installed('now', 'Zeit Now', 'https://zeit.co/now')
-        with temporary_docker_directory(files, name, metadata, extra_options, branch, extra_metadata):
+        with temporary_docker_directory(files, name, metadata, extra_options, branch, template_dir, static, extra_metadata):
            if force:
                call(['now', '--force'])
            else:
@@ -96,7 +110,7 @@ def publish(publisher, files, name, metadata, extra_options, force, branch, **extra_metadata):
             click.confirm('Install it? (this will run `heroku plugins:install heroku-builds`)', abort=True)
             call(["heroku", "plugins:install", "heroku-builds"])
 
-        with temporary_heroku_directory(files, name, metadata, extra_options, branch, extra_metadata):
+        with temporary_heroku_directory(files, name, metadata, extra_options, branch, template_dir, static, extra_metadata):
             create_output = check_output(
                 ['heroku', 'apps:create', '--json']
             ).decode('utf8')
@@ -174,12 +188,14 @@ def skeleton(files, metadata, sqlite_extensions):
 )
 @click.option('--extra-options', help='Extra options to pass to datasette serve')
 @click.option('--branch', help='Install datasette from a GitHub branch e.g. master')
+@click.option('--template-dir', type=click.Path(exists=True, file_okay=False, dir_okay=True), help='Path to directory containing custom templates')
+@click.option('--static', type=StaticMount(), help='mountpoint:path-to-directory for serving static files', multiple=True)
 @click.option('--title', help='Title for metadata')
 @click.option('--license', help='License label for metadata')
 @click.option('--license_url', help='License URL for metadata')
 @click.option('--source', help='Source label for metadata')
 @click.option('--source_url', help='Source URL for metadata')
-def package(files, tag, metadata, extra_options, branch, **extra_metadata):
+def package(files, tag, metadata, extra_options, branch, template_dir, static, **extra_metadata):
     "Package specified SQLite files into a new datasette Docker container"
     if not shutil.which('docker'):
         click.secho(
@@ -190,7 +206,7 @@ def package(files, tag, metadata, extra_options, branch, **extra_metadata):
             err=True,
         )
         sys.exit(1)
-    with temporary_docker_directory(files, 'datasette', metadata, extra_options, branch, extra_metadata):
+    with temporary_docker_directory(files, 'datasette', metadata, extra_options, branch, template_dir, static, extra_metadata):
         args = ['docker', 'build']
         if tag:
             args.append('-t')
@@ -199,18 +215,6 @@ def package(files, tag, metadata, extra_options, branch, **extra_metadata):
     call(args)
 
 
-class StaticMount(click.ParamType):
-    name = 'static mount'
-
-    def convert(self, value, param, ctx):
-        if ':' not in value:
-            self.fail('"%s" should be of format mountpoint:directory' % value, param, ctx)
-        path, dirpath = value.split(':')
-        if not os.path.exists(dirpath) or not os.path.isdir(dirpath):
-            self.fail('%s is not a valid directory path' % value, param, ctx)
-        return path, dirpath
-
-
 @cli.command()
 @click.argument('files', type=click.Path(exists=True), nargs=-1)
 @click.option('-h', '--host', default='127.0.0.1', help='host for server, defaults to 127.0.0.1')
@@ -132,12 +132,17 @@ def escape_sqlite_table_name(s):
     return '[{}]'.format(s)
 
 
-def make_dockerfile(files, metadata_file, extra_options='', branch=None):
+def make_dockerfile(files, metadata_file, extra_options, branch, template_dir, static):
     cmd = ['"datasette"', '"serve"', '"--host"', '"0.0.0.0"']
     cmd.append('"' + '", "'.join(files) + '"')
     cmd.extend(['"--cors"', '"--port"', '"8001"', '"--inspect-file"', '"inspect-data.json"'])
     if metadata_file:
         cmd.extend(['"--metadata"', '"{}"'.format(metadata_file)])
+    if template_dir:
+        cmd.extend(['"--template-dir"', '"templates/"'])
+    if static:
+        for mount_point, _ in static:
+            cmd.extend(['"--static"', '"{}:{}"'.format(mount_point, mount_point)])
     if extra_options:
         for opt in extra_options.split():
             cmd.append('"{}"'.format(opt))
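To make the new arguments concrete, here is a sketch of the CMD line make_dockerfile would emit for the commit-message example. It mirrors the cmd-building logic in the hunk above rather than importing datasette, and the sample values are mine.

# Reconstruction of the generated Dockerfile CMD for the example invocation.
files = ['sf-trees.db']
template_dir = 'templates/'
static = [('css', 'extra-css/'), ('js', 'extra-js/')]

cmd = ['"datasette"', '"serve"', '"--host"', '"0.0.0.0"']
cmd.append('"' + '", "'.join(files) + '"')
cmd.extend(['"--cors"', '"--port"', '"8001"', '"--inspect-file"', '"inspect-data.json"'])
if template_dir:
    cmd.extend(['"--template-dir"', '"templates/"'])
for mount_point, _ in static:
    # inside the image each static directory is copied to its mount point name
    cmd.extend(['"--static"', '"{}:{}"'.format(mount_point, mount_point)])

print('CMD [{}]'.format(', '.join(cmd)))
# CMD ["datasette", "serve", "--host", "0.0.0.0", "sf-trees.db", "--cors",
#      "--port", "8001", "--inspect-file", "inspect-data.json",
#      "--template-dir", "templates/", "--static", "css:css",
#      "--static", "js:js"]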
@@ -161,7 +166,7 @@ CMD [{cmd}]'''.format(
 
 
 @contextmanager
-def temporary_docker_directory(files, name, metadata, extra_options, branch=None, extra_metadata=None):
+def temporary_docker_directory(files, name, metadata, extra_options, branch, template_dir, static, extra_metadata=None):
     extra_metadata = extra_metadata or {}
     tmp = tempfile.TemporaryDirectory()
     # We create a datasette folder in there to get a nicer now deploy name
@@ -181,13 +186,30 @@ def temporary_docker_directory(files, name, metadata, extra_options, branch=None, extra_metadata=None):
         if value:
             metadata_content[key] = value
     try:
-        dockerfile = make_dockerfile(file_names, metadata_content and 'metadata.json', extra_options, branch)
+        dockerfile = make_dockerfile(
+            file_names,
+            metadata_content and 'metadata.json',
+            extra_options,
+            branch,
+            template_dir,
+            static,
+        )
         os.chdir(datasette_dir)
         if metadata_content:
             open('metadata.json', 'w').write(json.dumps(metadata_content, indent=2))
         open('Dockerfile', 'w').write(dockerfile)
         for path, filename in zip(file_paths, file_names):
             link_or_copy(path, os.path.join(datasette_dir, filename))
+        if template_dir:
+            link_or_copy_directory(
+                os.path.join(saved_cwd, template_dir),
+                os.path.join(datasette_dir, 'templates')
+            )
+        for mount_point, path in static:
+            link_or_copy_directory(
+                os.path.join(saved_cwd, path),
+                os.path.join(datasette_dir, mount_point)
+            )
         yield datasette_dir
     finally:
         tmp.cleanup()
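Putting the copying steps together, the temporary Docker build directory for the commit-message example would look roughly like this (an illustration, not captured output; note that each static directory is copied under its mount point name):

    datasette/
        Dockerfile
        metadata.json    only written when any metadata was supplied
        sf-trees.db      hard-linked, or copied if linking fails
        templates/       from --template-dir templates/
        css/             from --static css:extra-css/, served at /css
        js/              from --static js:extra-js/, served at /js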
@@ -195,7 +217,7 @@ def temporary_docker_directory(files, name, metadata, extra_options, branch=None, extra_metadata=None):
 
 
 @contextmanager
-def temporary_heroku_directory(files, name, metadata, extra_options, branch=None, extra_metadata=None):
+def temporary_heroku_directory(files, name, metadata, extra_options, branch, template_dir, static, extra_metadata=None):
     # FIXME: lots of duplicated code from above
 
     extra_metadata = extra_metadata or {}
@@ -235,9 +257,24 @@ def temporary_heroku_directory(files, name, metadata, extra_options, branch=None, extra_metadata=None):
         os.mkdir('bin')
         open('bin/post_compile', 'w').write('datasette inspect --inspect-file inspect-data.json')
 
-        quoted_files = " ".join(map(shlex.quote, files))
-        procfile_cmd = 'web: datasette serve --host 0.0.0.0 {quoted_files} --cors --port $PORT --inspect-file inspect-data.json'.format(
+        extras = []
+        if template_dir:
+            link_or_copy_directory(
+                os.path.join(saved_cwd, template_dir),
+                os.path.join(tmp.name, 'templates')
+            )
+            extras.extend(['--template-dir', 'templates/'])
+        for mount_point, path in static:
+            link_or_copy_directory(
+                os.path.join(saved_cwd, path),
+                os.path.join(tmp.name, mount_point)
+            )
+            extras.extend(['--static', '{}:{}'.format(mount_point, mount_point)])
+
+        quoted_files = " ".join(map(shlex.quote, file_names))
+        procfile_cmd = 'web: datasette serve --host 0.0.0.0 {quoted_files} --cors --port $PORT --inspect-file inspect-data.json {extras}'.format(
             quoted_files=quoted_files,
+            extras=' '.join(extras),
         )
         open('Procfile', 'w').write(procfile_cmd)
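For the same example, the Heroku branch would write a Procfile along these lines. This is a reconstruction of the string-building above with my own sample values, not captured output.

# Reconstruction of the generated Procfile command for
# --template-dir templates/ --static css:extra-css/ --static js:extra-js/
# with a single sf-trees.db file.
extras = ['--template-dir', 'templates/',
          '--static', 'css:css',
          '--static', 'js:js']
procfile_cmd = (
    'web: datasette serve --host 0.0.0.0 sf-trees.db --cors '
    '--port $PORT --inspect-file inspect-data.json {extras}'
).format(extras=' '.join(extras))
print(procfile_cmd)
# web: datasette serve --host 0.0.0.0 sf-trees.db --cors --port $PORT
#      --inspect-file inspect-data.json --template-dir templates/
#      --static css:css --static js:js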
@@ -503,5 +540,12 @@ def link_or_copy(src, dst):
     # https://github.com/simonw/datasette/issues/141
     try:
         os.link(src, dst)
-    except OSError as e:
+    except OSError:
         shutil.copyfile(src, dst)
+
+
+def link_or_copy_directory(src, dst):
+    try:
+        shutil.copytree(src, dst, copy_function=os.link)
+    except OSError:
+        shutil.copytree(src, dst)
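The copy helpers prefer hard links, which are instant and add no disk usage even for large database files, and fall back to a plain copy when linking fails (for example across filesystems). A tiny self-contained demonstration of that pattern, using throwaway temporary paths rather than anything datasette-specific:

import os
import shutil
import tempfile

# Create a small source file to link from.
src = os.path.join(tempfile.mkdtemp(), 'example.db')
with open(src, 'wb') as f:
    f.write(b'\x00' * 1024)

dst = os.path.join(tempfile.mkdtemp(), 'example.db')
try:
    os.link(src, dst)          # cheap: same inode, no bytes copied
except OSError:
    shutil.copyfile(src, dst)  # e.g. cross-device link not permitted

print(os.path.getsize(dst))    # 1024 either way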