Merge pull request #14 from minrk/newlines

line endings!
pull/19/head
Yuvi Panda 2017-05-24 16:54:52 -07:00 committed by GitHub
commit 032d98f956
3 changed files with 73 additions and 33 deletions

View file

@@ -18,7 +18,7 @@ import escapism
 from traitlets.config import Application, LoggingConfigurable
-from traitlets import Type, Bool, Unicode, Dict, List
+from traitlets import Type, Bool, Unicode, Dict, List, default
 import docker
 from docker.utils import kwargs_from_env
@@ -40,6 +40,10 @@ class Repo2Docker(Application):
         Path to read traitlets configuration file from.
         """
     )
+    @default('log_level')
+    def _default_log_level(self):
+        return logging.INFO
     repo = Unicode(
         os.getcwd(),
@@ -121,6 +125,13 @@ class Repo2Docker(Application):
         DANGEROUS WHEN DONE IN A CLOUD ENVIRONMENT! ONLY USE LOCALLY!
         """
     )
+    json_logs = Bool(
+        False,
+        config=True,
+        help="""
+        Enable JSON logging for easier consumption by external services.
+        """
+    )
     aliases = Dict({
         'repo': 'Repo2Docker.repo',
@@ -133,18 +144,21 @@ class Repo2Docker(Application):
         'no-clean': ({'Repo2Docker': {'cleanup_checkout': False}}, 'Do not clean up git checkout'),
         'no-run': ({'Repo2Docker': {'run': False}}, 'Do not run built container image'),
         'push': ({'Repo2Docker': {'push': True}}, 'Push built image to a docker registry'),
+        'json-logs': ({'Repo2Docker': {'json_logs': True}}, 'Enable JSON logging'),
     })
     def fetch(self, url, ref, checkout_path):
         try:
-            for line in execute_cmd(['git', 'clone', url, checkout_path]):
+            for line in execute_cmd(['git', 'clone', url, checkout_path],
+                                     capture=self.json_logs):
                 self.log.info(line, extra=dict(phase='fetching'))
         except subprocess.CalledProcessError:
             self.log.error('Failed to clone repository!', extra=dict(phase='failed'))
             sys.exit(1)
         try:
-            for line in execute_cmd(['git', 'reset', '--hard', ref], cwd=checkout_path):
+            for line in execute_cmd(['git', 'reset', '--hard', ref], cwd=checkout_path,
+                                     capture=self.json_logs):
                 self.log.info(line, extra=dict(phase='fetching'))
         except subprocess.CalledProcessError:
             self.log.error('Failed to check out ref %s', ref, extra=dict(phase='failed'))
@@ -152,15 +166,22 @@ class Repo2Docker(Application):
     def initialize(self, *args, **kwargs):
         super().initialize(*args, **kwargs)
-        logHandler = logging.StreamHandler()
-        formatter = jsonlogger.JsonFormatter()
-        logHandler.setFormatter(formatter)
-        # Need to reset existing handlers, or we repeat messages
-        self.log.handlers = []
-        self.log.addHandler(logHandler)
-        self.log.setLevel(logging.INFO)
         self.load_config_file(self.config_file)
+        if self.json_logs:
+            # Need to reset existing handlers, or we repeat messages
+            logHandler = logging.StreamHandler()
+            formatter = jsonlogger.JsonFormatter()
+            logHandler.setFormatter(formatter)
+            self.log.handlers = []
+            self.log.addHandler(logHandler)
+            self.log.setLevel(logging.INFO)
+        else:
+            # due to json logger stuff above,
+            # our log messages include carriage returns, newlines, etc.
+            # remove the additional newline from the stream handler
+            self.log.handlers[0].terminator = ''
         if len(self.extra_args) == 1:
             # accept repo as a positional arg
             self.repo = self.extra_args[0]
@@ -192,7 +213,7 @@ class Repo2Docker(Application):
             else:
                 layers[progress['id']] = progress['status']
             if time.time() - last_emit_time > 1.5:
-                self.log.info('Pushing image', extra=dict(progress=layers, phase='pushing'))
+                self.log.info('Pushing image\n', extra=dict(progress=layers, phase='pushing'))
                 last_emit_time = time.time()
     def run_image(self):
@@ -210,9 +231,9 @@ class Repo2Docker(Application):
         try:
             for line in container.logs(stream=True):
-                self.log.info(line.decode('utf-8').rstrip(), extra=dict(phase='running'))
+                self.log.info(line.decode('utf-8'), extra=dict(phase='running'))
         finally:
-            self.log.info('Stopping container...', extra=dict(phase='running'))
+            self.log.info('Stopping container...\n', extra=dict(phase='running'))
             container.kill()
             container.remove()
@@ -247,9 +268,9 @@ class Repo2Docker(Application):
             checkout_path
         )
         for bp_class in self.buildpacks:
-            bp = bp_class()
+            bp = bp_class(parent=self, log=self.log, capture=self.json_logs)
             if bp.detect(checkout_path):
-                self.log.info('Using %s builder', bp.name, extra=dict(phase='building'))
+                self.log.info('Using %s builder\n', bp.name, extra=dict(phase='building'))
                 bp.build(checkout_path, self.ref, self.output_image_spec)
                 break
         else:
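
Taken together, the changes in this file make JSON logging opt-in: `--json-logs` installs python-json-logger's formatter, while the default path keeps a plain StreamHandler whose terminator is cleared, since captured subprocess output already carries its own `\r`/`\n` endings. A minimal standalone sketch of that handler switch, assuming the python-json-logger package is available (the logger name and sample message are illustrative, not part of the diff):

```python
import logging
from pythonjsonlogger import jsonlogger  # provided by python-json-logger

def make_logger(json_logs=False):
    log = logging.getLogger('repo2docker-demo')  # illustrative name
    log.handlers = []  # reset existing handlers, or we repeat messages
    handler = logging.StreamHandler()
    if json_logs:
        # one JSON object per record, for external log collectors
        handler.setFormatter(jsonlogger.JsonFormatter())
    else:
        # captured lines already end in '\r' or '\n',
        # so drop the handler's own trailing newline
        handler.terminator = ''
    log.addHandler(handler)
    log.setLevel(logging.INFO)
    return log

log = make_logger(json_logs=True)
log.info('Pushing image\n', extra=dict(phase='pushing'))
```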

View file

@@ -4,7 +4,7 @@ import subprocess
 import docker
-from traitlets import Unicode, Dict
+from traitlets import Unicode, Dict, Bool
 from traitlets.config import LoggingConfigurable
 import logging
@@ -16,16 +16,7 @@ here = os.path.abspath(os.path.dirname(__file__))
 class BuildPack(LoggingConfigurable):
     name = Unicode()
-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-        # FIXME: Not sure why this needs to be repeated - shouldn't configuring Application be enough?
-        logHandler = logging.StreamHandler()
-        formatter = jsonlogger.JsonFormatter()
-        logHandler.setFormatter(formatter)
-        # Need to reset existing handlers, or we repeat messages
-        self.log.handlers = []
-        self.log.addHandler(logHandler)
-        self.log.setLevel(logging.INFO)
+    capture = Bool(False, help="Capture output for logging")
     def detect(self, workdir):
         """
@@ -53,7 +44,7 @@ class DockerBuildPack(BuildPack):
             decode=True
         ):
             if 'stream' in progress:
-                self.log.info(progress['stream'].rstrip(), extra=dict(phase='building'))
+                self.log.info(progress['stream'], extra=dict(phase='building'))
 class S2IBuildPack(BuildPack):
@@ -79,7 +70,7 @@ class S2IBuildPack(BuildPack):
         # in case user doesn't have s2i
         env['PATH'] = os.pathsep.join([env.get('PATH') or os.defpath, here])
         try:
-            for line in execute_cmd(cmd, cwd=workdir, env=env):
+            for line in execute_cmd(cmd, cwd=workdir, env=env, capture=self.capture):
                 self.log.info(line, extra=dict(phase='building', builder=self.name))
         except subprocess.CalledProcessError:
             self.log.error('Failed to build image!', extra=dict(phase='failed'))
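
The effect in this file is that `BuildPack` no longer wires up its own JSON handler in `__init__`; it simply exposes a `capture` trait and reuses whatever logger the parent application hands it (`bp_class(parent=self, log=self.log, capture=self.json_logs)` above). A minimal sketch of that traitlets pattern, with a bare `Application` standing in for `Repo2Docker`:

```python
from traitlets import Bool, Unicode
from traitlets.config import Application, LoggingConfigurable

class BuildPack(LoggingConfigurable):
    # stripped-down stand-in for the BuildPack base class in the diff
    name = Unicode()
    capture = Bool(False, help="Capture output for logging")

app = Application()
# the build pack shares the application's logger and capture flag
# instead of configuring handlers itself
bp = BuildPack(parent=app, log=app.log, capture=True)
print(bp.capture, bp.log is app.log)  # -> True True
```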

View file

@@ -1,14 +1,42 @@
+from functools import partial
 import subprocess
-def execute_cmd(cmd, **kwargs):
+def execute_cmd(cmd, capture=False, **kwargs):
     """
-    Call given command, yielding output line by line
+    Call given command, yielding output line by line if capture=True
     """
-    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True, **kwargs)
+    if capture:
+        kwargs['stdout'] = subprocess.PIPE
+        kwargs['stderr'] = subprocess.STDOUT
+    proc = subprocess.Popen(cmd, **kwargs)
+    if not capture:
+        # not capturing output, let the subprocesses talk directly to the terminal
+        ret = proc.wait()
+        if ret != 0:
+            raise subprocess.CalledProcessError(ret, cmd)
+        return
+    # Capture output for logging.
+    # Each line will be yielded as text.
+    # This should behave the same as .readline(), but splits on `\r` OR `\n`,
+    # not just `\n`.
+    buf = []
+    def flush():
+        line = b''.join(buf).decode('utf8', 'replace')
+        buf[:] = []
+        return line
+    c_last = ''
     try:
-        for line in iter(proc.stdout.readline, ''):
-            yield line.rstrip()
+        for c in iter(partial(proc.stdout.read, 1), b''):
+            if c_last == b'\r' and buf and c != b'\n':
+                yield flush()
+            buf.append(c)
+            if c == b'\n':
+                yield flush()
+            c_last = c
     finally:
         ret = proc.wait()
         if ret != 0:
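
This rewritten `execute_cmd` is the heart of the PR: when capturing, it reads the child's output a byte at a time so that progress-bar updates, which end in a bare `\r` rather than `\n`, are yielded as soon as they complete and keep their original line ending. A self-contained sketch of the same splitting rule applied to an in-memory stream (the helper name `split_on_cr_or_lf` is illustrative, not part of the codebase):

```python
import io
from functools import partial

def split_on_cr_or_lf(stream):
    r"""Yield text chunks from a byte stream, splitting on '\r' OR '\n'."""
    buf = []

    def flush():
        line = b''.join(buf).decode('utf8', 'replace')
        buf[:] = []
        return line

    c_last = b''
    for c in iter(partial(stream.read, 1), b''):
        if c_last == b'\r' and buf and c != b'\n':
            # previous chunk ended with a lone '\r' (e.g. a progress bar redraw)
            yield flush()
        buf.append(c)
        if c == b'\n':
            yield flush()
        c_last = c
    if buf:
        yield flush()

demo = io.BytesIO(b'Cloning...\nprogress  10%\rprogress 100%\rdone\n')
print(list(split_on_cr_or_lf(demo)))
# ['Cloning...\n', 'progress  10%\r', 'progress 100%\r', 'done\n']
```

Preserving those endings is what lets the non-JSON handler above (with `terminator = ''`) replay progress bars faithfully on the terminal.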