Merge pull request #143 from betatim/pep8-roller

Apply PEP8 style roller
pull/169/head
Yuvi Panda 2017-12-08 12:22:53 -05:00 committed by GitHub
commit e151f77d3a
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
4 changed files with 61 additions and 35 deletions
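The diffs below are mechanical reflows to satisfy PEP 8, mostly the 79-character line limit and blank-line rules. The PR does not say which tool was used to roll the style over; as a rough sketch, the result can be checked locally with pycodestyle (the tool choice and path are assumptions, not part of this PR):

    import pycodestyle

    # Hypothetical local check; pycodestyle and the target path are assumed.
    style = pycodestyle.StyleGuide(max_line_length=79)
    report = style.check_files(['repo2docker'])
    print(report.total_errors)  # expected to be 0 once the roller is applied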

View file

@@ -107,16 +107,19 @@ class Repo2Docker(Application):
                                      capture=self.json_logs):
                 self.log.info(line, extra=dict(phase='fetching'))
         except subprocess.CalledProcessError:
-            self.log.error('Failed to clone repository!', extra=dict(phase='failed'))
+            self.log.error('Failed to clone repository!',
+                           extra=dict(phase='failed'))
             sys.exit(1)

         if ref:
             try:
-                for line in execute_cmd(['git', 'reset', '--hard', ref], cwd=checkout_path,
+                for line in execute_cmd(['git', 'reset', '--hard', ref],
+                                        cwd=checkout_path,
                                         capture=self.json_logs):
                     self.log.info(line, extra=dict(phase='fetching'))
             except subprocess.CalledProcessError:
-                self.log.error('Failed to check out ref %s', ref, extra=dict(phase='failed'))
+                self.log.error('Failed to check out ref %s', ref,
+                               extra=dict(phase='failed'))
                 sys.exit(1)

     def get_argparser(self):
@@ -136,12 +139,14 @@ class Repo2Docker(Application):

         argparser.add_argument(
             'repo',
-            help='Path to repository that should be built. Could be local path or a git URL.'
+            help=('Path to repository that should be built. Could be '
+                  'local path or a git URL.')
         )

         argparser.add_argument(
             '--image-name',
-            help='Name of image to be built. If unspecified will be autogenerated'
+            help=('Name of image to be built. If unspecified will be '
+                  'autogenerated')
         )

         argparser.add_argument(
@@ -159,7 +164,8 @@ class Repo2Docker(Application):
             '--no-build',
             dest='build',
             action='store_false',
-            help="Do not actually build the image. Useful in conjunction with --debug."
+            help=('Do not actually build the image. Useful in conjunction '
+                  'with --debug.')
         )

         argparser.add_argument(
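The reflowed help= strings rely on Python's implicit concatenation of adjacent string literals, so the argparse output is unchanged as long as the break keeps the separating space. A quick check of the mechanism, using a string taken from the hunk above:

    # Adjacent literals are joined at compile time; note the trailing space
    # before the line break.
    help_text = ('Do not actually build the image. Useful in conjunction '
                 'with --debug.')
    original = "Do not actually build the image. Useful in conjunction with --debug."
    assert help_text == original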
@@ -202,10 +208,8 @@ class Repo2Docker(Application):
         Avoids non-JSON output on errors when using --json-logs
         """
         self.log.error("Error during build: %s", evalue,
                        exc_info=(etype, evalue, traceback),
-                       extra=dict(phase='failed'),
-        )
+                       extra=dict(phase='failed'))

     def initialize(self):
         args = self.get_argparser().parse_args()
@@ -243,14 +247,20 @@ class Repo2Docker(Application):
             # remove the additional newline from the stream handler
             self.log.handlers[0].terminator = ''
             # We don't want a [Repo2Docker] on all messages
-            self.log.handlers[0].formatter = logging.Formatter(fmt='%(message)s')
+            self.log.handlers[0].formatter = logging.Formatter(
+                fmt='%(message)s'
+            )

         if args.image_name:
             self.output_image_spec = args.image_name
         else:
             # Attempt to set a sane default!
             # HACK: Provide something more descriptive?
-            self.output_image_spec = 'r2d' + escapism.escape(self.repo, escape_char='-').lower() + str(int(time.time()))
+            self.output_image_spec = (
+                'r2d' +
+                escapism.escape(self.repo, escape_char='-').lower() +
+                str(int(time.time()))
+            )

         self.push = args.push
         self.run = args.run
@@ -269,7 +279,8 @@ class Repo2Docker(Application):

     def push_image(self):
         client = docker.APIClient(version='auto', **kwargs_from_env())
-        # Build a progress setup for each layer, and only emit per-layer info every 1.5s
+        # Build a progress setup for each layer, and only emit per-layer
+        # info every 1.5s
         layers = {}
         last_emit_time = time.time()
         for line in client.push(self.output_image_spec, stream=True):
@@ -284,7 +295,8 @@ class Repo2Docker(Application):
             else:
                 layers[progress['id']] = progress['status']
             if time.time() - last_emit_time > 1.5:
-                self.log.info('Pushing image\n', extra=dict(progress=layers, phase='pushing'))
+                self.log.info('Pushing image\n',
+                              extra=dict(progress=layers, phase='pushing'))
                 last_emit_time = time.time()

     def run_image(self):
@@ -292,8 +304,9 @@ class Repo2Docker(Application):
         port = self._get_free_port()
         if not self.run_cmd:
             port = str(self._get_free_port())
-            run_cmd = ['jupyter', 'notebook', '--ip', '0.0.0.0', '--port', port]
-            ports={'%s/tcp' % port: port}
+            run_cmd = ['jupyter', 'notebook', '--ip', '0.0.0.0',
+                       '--port', port]
+            ports = {'%s/tcp' % port: port}
         else:
             run_cmd = self.run_cmd
             ports = {}
@@ -309,11 +322,13 @@ class Repo2Docker(Application):
         try:
             for line in container.logs(stream=True):
-                self.log.info(line.decode('utf-8'), extra=dict(phase='running'))
+                self.log.info(line.decode('utf-8'),
+                              extra=dict(phase='running'))
         finally:
             container.reload()
             if container.status == 'running':
-                self.log.info('Stopping container...\n', extra=dict(phase='running'))
+                self.log.info('Stopping container...\n',
+                              extra=dict(phase='running'))
                 container.kill()
             exit_code = container.attrs['State']['ExitCode']
             container.remove()
@@ -325,7 +340,7 @@ class Repo2Docker(Application):
         """
         import socket
         s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        s.bind(("",0))
+        s.bind(("", 0))
         port = s.getsockname()[1]
         s.close()
         return port
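The last hunk only adds a space after the comma, but the helper it touches uses a pattern worth noting: binding to port 0 asks the OS for any currently unused port. A standalone sketch of the same idea:

    import socket

    def get_free_port():
        # Bind to port 0 so the OS assigns an unused port, read the number
        # back, then release the socket so the caller can reuse it.
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.bind(("", 0))
        port = s.getsockname()[1]
        s.close()
        return port

    print(get_free_port())  # e.g. 49152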

View file

@@ -116,6 +116,7 @@ RUN ./{{ s }}

 DOC_URL = "http://repo2docker.readthedocs.io/en/latest/samples.html"

+
 class BuildPack(LoggingConfigurable):
     """
     A composable BuildPack.
@@ -155,8 +156,8 @@ class BuildPack(LoggingConfigurable):
         Base set of apt packages that are installed for all images.

         These contain useful images that are commonly used by a lot of images,
-        where it would be useful to share a base docker image layer that contains
-        them.
+        where it would be useful to share a base docker image layer that
+        contains them.

         These would be installed with a --no-install-recommends option.
         """
@@ -257,10 +258,10 @@ class BuildPack(LoggingConfigurable):
     post_build_scripts = List(
         [],
         help="""
-        An ordered list of executable scripts that should be executed after build.
+        An ordered list of executable scripts to execute after build.

-        Is run as a non-root user, and must be executable. Used for doing things
-        that are currently not supported by other means!
+        Is run as a non-root user, and must be executable. Used for doing
+        things that are currently not supported by other means!

         The scripts should be as deterministic as possible - running it twice
         should not produce different results!
@@ -295,8 +296,10 @@ class BuildPack(LoggingConfigurable):
         # FIXME: Deduplicate Env
         result.env = self.env + other.env
         result.build_scripts = self.build_scripts + other.build_scripts
-        result.assemble_scripts = self.assemble_scripts + other.assemble_scripts
-        result.post_build_scripts = self.post_build_scripts + other.post_build_scripts
+        result.assemble_scripts = (self.assemble_scripts +
+                                   other.assemble_scripts)
+        result.post_build_scripts = (self.post_build_scripts +
+                                     other.post_build_scripts)

         build_script_files = {}
         build_script_files.update(self.build_script_files)
@@ -305,7 +308,8 @@ class BuildPack(LoggingConfigurable):

         result.name = "{}-{}".format(self.name, other.name)
-        result.components = (self, ) + self.components + (other, ) + other.components
+        result.components = ((self, ) + self.components +
+                             (other, ) + other.components)
         return result

     def binder_path(self, path):
@@ -396,7 +400,8 @@ class BuildPack(LoggingConfigurable):
         }
         if memory_limit:
             limits['memory'] = memory_limit
-        client = docker.APIClient(version='auto', **docker.utils.kwargs_from_env())
+        client = docker.APIClient(version='auto',
+                                  **docker.utils.kwargs_from_env())
         for line in client.build(
                 fileobj=tarf,
                 tag=image_spec,
@@ -426,20 +431,19 @@ class BaseImage(BuildPack):
         assemble_scripts = []
         try:
             with open(self.binder_path('apt.txt')) as f:
                 extra_apt_packages = []
                 for l in f:
                     package = l.partition('#')[0].strip()
                     if not package:
                         continue
                     # Validate that this is, indeed, just a list of packages
                     # We're doing shell injection around here, gotta be careful.
                     # FIXME: Add support for specifying version numbers
                     if not re.match(r"^[a-z0-9.+-]+", package):
-                        raise ValueError("Found invalid package name {} in apt.txt".format(package))
+                        raise ValueError("Found invalid package name {} in "
+                                         "apt.txt".format(package))
                     extra_apt_packages.append(package)

             assemble_scripts.append((
                 'root',
                 r"""
@@ -460,6 +464,7 @@ class BaseImage(BuildPack):
         if os.path.exists(post_build):
             if not stat.S_IXUSR & os.stat(post_build).st_mode:
                 raise ValueError("%s is not executable, see %s for help." % (
-                    post_build, DOC_URL+'#system-post-build-scripts'))
+                    post_build,
+                    DOC_URL+'#system-post-build-scripts'))
             return [post_build]
         return []

View file

@@ -16,7 +16,8 @@ def execute_cmd(cmd, capture=False, **kwargs):
     proc = subprocess.Popen(cmd, **kwargs)

     if not capture:
-        # not capturing output, let the subprocesses talk directly to the terminal
+        # not capturing output, let the subprocesses talk directly
+        # to the terminal
         ret = proc.wait()
         if ret != 0:
             raise subprocess.CalledProcessError(ret, cmd)
@@ -27,6 +28,7 @@ def execute_cmd(cmd, capture=False, **kwargs):
     # This should behave the same as .readline(), but splits on `\r` OR `\n`,
     # not just `\n`.
     buf = []
+
     def flush():
         line = b''.join(buf).decode('utf8', 'replace')
         buf[:] = []
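The surrounding capture path buffers bytes and treats either '\r' or '\n' as a line terminator, so progress output that rewrites a line with '\r' still gets surfaced. A simplified, self-contained sketch of that splitting (not the exact loop in the changed file):

    import io

    def split_lines(stream):
        # Yield decoded chunks, flushing the buffer on '\r' OR '\n'.
        buf = []

        def flush():
            line = b''.join(buf).decode('utf8', 'replace')
            buf[:] = []
            return line

        for byte in iter(lambda: stream.read(1), b''):
            if byte in (b'\r', b'\n'):
                if buf:
                    yield flush()
            else:
                buf.append(byte)
        if buf:
            yield flush()

    print(list(split_lines(io.BytesIO(b'10%\r100%\ndone\n'))))
    # ['10%', '100%', 'done']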

View file

@@ -12,16 +12,19 @@ import subprocess
 import yaml
 import shlex

+
 def pytest_collect_file(parent, path):
     if path.basename == 'verify':
         return LocalRepo(path, parent)
     elif path.basename.endswith('.repos.yaml'):
         return RemoteRepoList(path, parent)

+
 class LocalRepo(pytest.File):
     def collect(self):
         yield LocalRepoTest(self.fspath.basename, self, self.fspath)

+
 class LocalRepoTest(pytest.Item):
     def __init__(self, name, parent, path):
         super().__init__(name, parent)
@@ -40,7 +43,8 @@ class RemoteRepoList(pytest.File):
         with self.fspath.open() as f:
             repos = yaml.safe_load(f)
         for repo in repos:
-            yield RemoteRepoTest(repo['name'], self, repo['url'], repo['ref'], repo['verify'])
+            yield RemoteRepoTest(repo['name'], self, repo['url'],
+                                 repo['ref'], repo['verify'])


 class RemoteRepoTest(pytest.Item):
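The collector reads each entry's name, url, ref and verify fields from a .repos.yaml file. A hypothetical entry and how its fields map onto the RemoteRepoTest arguments (the repository values are made up for illustration):

    import yaml

    # Illustrative .repos.yaml content; the real file lists actual repos.
    doc = """
    - name: example
      url: https://github.com/example/example
      ref: master
      verify: python -c "import example"
    """

    for repo in yaml.safe_load(doc):
        print(repo['name'], repo['url'], repo['ref'], repo['verify'])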