Mirror of https://github.com/jupyterhub/repo2docker
commit e151f77d3a
@@ -107,16 +107,19 @@ class Repo2Docker(Application):
                                     capture=self.json_logs):
                 self.log.info(line, extra=dict(phase='fetching'))
         except subprocess.CalledProcessError:
-            self.log.error('Failed to clone repository!', extra=dict(phase='failed'))
+            self.log.error('Failed to clone repository!',
+                           extra=dict(phase='failed'))
             sys.exit(1)

         if ref:
             try:
-                for line in execute_cmd(['git', 'reset', '--hard', ref], cwd=checkout_path,
+                for line in execute_cmd(['git', 'reset', '--hard', ref],
+                                        cwd=checkout_path,
                                         capture=self.json_logs):
                     self.log.info(line, extra=dict(phase='fetching'))
             except subprocess.CalledProcessError:
-                self.log.error('Failed to check out ref %s', ref, extra=dict(phase='failed'))
+                self.log.error('Failed to check out ref %s', ref,
+                               extra=dict(phase='failed'))
                 sys.exit(1)

     def get_argparser(self):
@@ -136,12 +139,14 @@ class Repo2Docker(Application):

         argparser.add_argument(
             'repo',
-            help='Path to repository that should be built. Could be local path or a git URL.'
+            help=('Path to repository that should be built. Could be '
+                  'local path or a git URL.')
         )

         argparser.add_argument(
             '--image-name',
-            help='Name of image to be built. If unspecified will be autogenerated'
+            help=('Name of image to be built. If unspecified will be '
+                  'autogenerated')
         )

         argparser.add_argument(
@@ -159,7 +164,8 @@ class Repo2Docker(Application):
             '--no-build',
             dest='build',
             action='store_false',
-            help="Do not actually build the image. Useful in conjunction with --debug."
+            help=('Do not actually build the image. Useful in conjunction '
+                  'with --debug.')
         )

         argparser.add_argument(
@@ -202,10 +208,8 @@ class Repo2Docker(Application):
         Avoids non-JSON output on errors when using --json-logs
         """
         self.log.error("Error during build: %s", evalue,
-            exc_info=(etype, evalue, traceback),
-            extra=dict(phase='failed'),
-        )
-
+                       exc_info=(etype, evalue, traceback),
+                       extra=dict(phase='failed'))

     def initialize(self):
         args = self.get_argparser().parse_args()
@@ -243,14 +247,20 @@ class Repo2Docker(Application):
         # remove the additional newline from the stream handler
         self.log.handlers[0].terminator = ''
         # We don't want a [Repo2Docker] on all messages
-        self.log.handlers[0].formatter = logging.Formatter(fmt='%(message)s')
+        self.log.handlers[0].formatter = logging.Formatter(
+            fmt='%(message)s'
+        )

         if args.image_name:
             self.output_image_spec = args.image_name
         else:
             # Attempt to set a sane default!
             # HACK: Provide something more descriptive?
-            self.output_image_spec = 'r2d' + escapism.escape(self.repo, escape_char='-').lower() + str(int(time.time()))
+            self.output_image_spec = (
+                'r2d' +
+                escapism.escape(self.repo, escape_char='-').lower() +
+                str(int(time.time()))
+            )

         self.push = args.push
         self.run = args.run
@@ -269,7 +279,8 @@ class Repo2Docker(Application):

     def push_image(self):
         client = docker.APIClient(version='auto', **kwargs_from_env())
-        # Build a progress setup for each layer, and only emit per-layer info every 1.5s
+        # Build a progress setup for each layer, and only emit per-layer
+        # info every 1.5s
         layers = {}
         last_emit_time = time.time()
         for line in client.push(self.output_image_spec, stream=True):
@@ -284,7 +295,8 @@ class Repo2Docker(Application):
             else:
                 layers[progress['id']] = progress['status']
             if time.time() - last_emit_time > 1.5:
-                self.log.info('Pushing image\n', extra=dict(progress=layers, phase='pushing'))
+                self.log.info('Pushing image\n',
+                              extra=dict(progress=layers, phase='pushing'))
                 last_emit_time = time.time()

     def run_image(self):
@@ -292,8 +304,9 @@ class Repo2Docker(Application):
         port = self._get_free_port()
         if not self.run_cmd:
             port = str(self._get_free_port())
-            run_cmd = ['jupyter', 'notebook', '--ip', '0.0.0.0', '--port', port]
-            ports={'%s/tcp' % port: port}
+            run_cmd = ['jupyter', 'notebook', '--ip', '0.0.0.0',
+                       '--port', port]
+            ports = {'%s/tcp' % port: port}
         else:
             run_cmd = self.run_cmd
             ports = {}
@@ -309,11 +322,13 @@ class Repo2Docker(Application):

         try:
             for line in container.logs(stream=True):
-                self.log.info(line.decode('utf-8'), extra=dict(phase='running'))
+                self.log.info(line.decode('utf-8'),
+                              extra=dict(phase='running'))
         finally:
             container.reload()
             if container.status == 'running':
-                self.log.info('Stopping container...\n', extra=dict(phase='running'))
+                self.log.info('Stopping container...\n',
+                              extra=dict(phase='running'))
                 container.kill()
             exit_code = container.attrs['State']['ExitCode']
             container.remove()
@@ -325,7 +340,7 @@ class Repo2Docker(Application):
         """
         import socket
         s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        s.bind(("",0))
+        s.bind(("", 0))
         port = s.getsockname()[1]
         s.close()
         return port

@@ -116,6 +116,7 @@ RUN ./{{ s }}

 DOC_URL = "http://repo2docker.readthedocs.io/en/latest/samples.html"

+
 class BuildPack(LoggingConfigurable):
     """
     A composable BuildPack.
@@ -155,8 +156,8 @@ class BuildPack(LoggingConfigurable):
         Base set of apt packages that are installed for all images.

         These contain useful images that are commonly used by a lot of images,
-        where it would be useful to share a base docker image layer that contains
-        them.
+        where it would be useful to share a base docker image layer that
+        contains them.

         These would be installed with a --no-install-recommends option.
         """
@@ -257,10 +258,10 @@ class BuildPack(LoggingConfigurable):
     post_build_scripts = List(
         [],
         help="""
-        An ordered list of executable scripts that should be executed after build.
+        An ordered list of executable scripts to execute after build.

-        Is run as a non-root user, and must be executable. Used for doing things
-        that are currently not supported by other means!
+        Is run as a non-root user, and must be executable. Used for doing
+        things that are currently not supported by other means!

         The scripts should be as deterministic as possible - running it twice
         should not produce different results!
@@ -295,8 +296,10 @@ class BuildPack(LoggingConfigurable):
         # FIXME: Deduplicate Env
         result.env = self.env + other.env
         result.build_scripts = self.build_scripts + other.build_scripts
-        result.assemble_scripts = self.assemble_scripts + other.assemble_scripts
-        result.post_build_scripts = self.post_build_scripts + other.post_build_scripts
+        result.assemble_scripts = (self.assemble_scripts +
+                                   other.assemble_scripts)
+        result.post_build_scripts = (self.post_build_scripts +
+                                     other.post_build_scripts)

         build_script_files = {}
         build_script_files.update(self.build_script_files)
@@ -305,7 +308,8 @@ class BuildPack(LoggingConfigurable):

         result.name = "{}-{}".format(self.name, other.name)

-        result.components = (self, ) + self.components + (other, ) + other.components
+        result.components = ((self, ) + self.components +
+                             (other, ) + other.components)
         return result

     def binder_path(self, path):
@@ -396,7 +400,8 @@ class BuildPack(LoggingConfigurable):
         }
         if memory_limit:
             limits['memory'] = memory_limit
-        client = docker.APIClient(version='auto', **docker.utils.kwargs_from_env())
+        client = docker.APIClient(version='auto',
+                                  **docker.utils.kwargs_from_env())
         for line in client.build(
                 fileobj=tarf,
                 tag=image_spec,
@@ -426,20 +431,19 @@ class BaseImage(BuildPack):
         assemble_scripts = []
         try:
             with open(self.binder_path('apt.txt')) as f:
-
                 extra_apt_packages = []
                 for l in f:
                     package = l.partition('#')[0].strip()
                     if not package:
-                            continue
+                        continue
                     # Validate that this is, indeed, just a list of packages
                     # We're doing shell injection around here, gotta be careful.
                     # FIXME: Add support for specifying version numbers
                     if not re.match(r"^[a-z0-9.+-]+", package):
-                        raise ValueError("Found invalid package name {} in apt.txt".format(package))
+                        raise ValueError("Found invalid package name {} in "
+                                         "apt.txt".format(package))
                     extra_apt_packages.append(package)

-
             assemble_scripts.append((
                 'root',
                 r"""
@@ -460,6 +464,7 @@ class BaseImage(BuildPack):
         if os.path.exists(post_build):
             if not stat.S_IXUSR & os.stat(post_build).st_mode:
                 raise ValueError("%s is not executable, see %s for help." % (
-                    post_build, DOC_URL+'#system-post-build-scripts'))
+                    post_build,
+                    DOC_URL+'#system-post-build-scripts'))
             return [post_build]
         return []

@@ -16,7 +16,8 @@ def execute_cmd(cmd, capture=False, **kwargs):
     proc = subprocess.Popen(cmd, **kwargs)

     if not capture:
-        # not capturing output, let the subprocesses talk directly to the terminal
+        # not capturing output, let the subprocesses talk directly
+        # to the terminal
         ret = proc.wait()
         if ret != 0:
             raise subprocess.CalledProcessError(ret, cmd)
@@ -27,6 +28,7 @@ def execute_cmd(cmd, capture=False, **kwargs):
     # This should behave the same as .readline(), but splits on `\r` OR `\n`,
     # not just `\n`.
     buf = []
+
     def flush():
         line = b''.join(buf).decode('utf8', 'replace')
         buf[:] = []

@@ -12,16 +12,19 @@ import subprocess
 import yaml
 import shlex

+
 def pytest_collect_file(parent, path):
     if path.basename == 'verify':
         return LocalRepo(path, parent)
     elif path.basename.endswith('.repos.yaml'):
         return RemoteRepoList(path, parent)

+
 class LocalRepo(pytest.File):
     def collect(self):
         yield LocalRepoTest(self.fspath.basename, self, self.fspath)

+
 class LocalRepoTest(pytest.Item):
     def __init__(self, name, parent, path):
         super().__init__(name, parent)
@@ -40,7 +43,8 @@ class RemoteRepoList(pytest.File):
         with self.fspath.open() as f:
             repos = yaml.safe_load(f)
         for repo in repos:
-            yield RemoteRepoTest(repo['name'], self, repo['url'], repo['ref'], repo['verify'])
+            yield RemoteRepoTest(repo['name'], self, repo['url'],
+                                 repo['ref'], repo['verify'])


 class RemoteRepoTest(pytest.Item):