Merge branch 'master' into master

pull/128/head
Rui Carmo 2019-11-20 18:42:50 +00:00 zatwierdzone przez GitHub
commit 2f1c815512
Nie znaleziono w bazie danych klucza dla tego podpisu
ID klucza GPG: 4AEE18F83AFDEB23
2 zmienionych plików z 192 dodań i 172 usunięć

3
.github/workflows/pylint.yml vendored 100644
Wyświetl plik

@@ -0,0 +1,3 @@
- name: GitHub Action for pylint
uses: cclauss/GitHub-Action-for-pylint@0.7.0
args: "pip install -r requirements.txt ; pylint **/*.py"

361
piku.py
Wyświetl plik

@ -4,32 +4,31 @@
try:
from sys import version_info
assert version_info >= (3,5)
assert version_info >= (3, 5)
except AssertionError:
exit("Piku requires Python 3.5 or above")
from click import argument, command, group, get_current_context, option, secho as echo, pass_context
from collections import defaultdict, deque
from datetime import datetime
from fcntl import fcntl, F_SETFL, F_GETFL
from glob import glob
from hashlib import md5
from grp import getgrgid
from json import loads
from multiprocessing import cpu_count
from os import chmod, getgid, getuid, symlink, unlink, remove, stat, listdir, environ, makedirs, O_NONBLOCK
from os.path import abspath, basename, dirname, exists, getmtime, join, realpath, splitext
from pwd import getpwuid
from re import sub
from shutil import copyfile, rmtree, which
from socket import socket, AF_INET, SOCK_STREAM
from sys import argv, stdin, stdout, stderr, version_info, exit
from stat import S_IRUSR, S_IWUSR, S_IXUSR
from subprocess import call, check_output, Popen, STDOUT, PIPE
from subprocess import call, check_output, Popen, STDOUT
from sys import argv, stdin, stdout, stderr, version_info, exit
from tempfile import NamedTemporaryFile
from traceback import format_exc
from time import sleep
from traceback import format_exc
from urllib.request import urlopen
from pwd import getpwuid
from grp import getgrgid
from click import argument, group, secho as echo, pass_context
# === Make sure we can access all system binaries ===
@ -38,8 +37,8 @@ if 'sbin' not in environ['PATH']:
# === Globals - all tweakable settings are here ===
PIKU_ROOT = environ.get('PIKU_ROOT', join(environ['HOME'],'.piku'))
PIKU_BIN = join(environ['HOME'],'bin')
PIKU_ROOT = environ.get('PIKU_ROOT', join(environ['HOME'], '.piku'))
PIKU_BIN = join(environ['HOME'], 'bin')
PIKU_SCRIPT = realpath(__file__)
APP_ROOT = abspath(join(PIKU_ROOT, "apps"))
ENV_ROOT = abspath(join(PIKU_ROOT, "envs"))
@ -50,7 +49,7 @@ UWSGI_AVAILABLE = abspath(join(PIKU_ROOT, "uwsgi-available"))
UWSGI_ENABLED = abspath(join(PIKU_ROOT, "uwsgi-enabled"))
UWSGI_ROOT = abspath(join(PIKU_ROOT, "uwsgi"))
UWSGI_LOG_MAXSIZE = '1048576'
ACME_ROOT = environ.get('ACME_ROOT', join(environ['HOME'],'.acme.sh'))
ACME_ROOT = environ.get('ACME_ROOT', join(environ['HOME'], '.acme.sh'))
ACME_WWW = abspath(join(PIKU_ROOT, "acme"))
# === Make sure we can access piku user-installed binaries === #
@ -112,7 +111,7 @@ NGINX_COMMON_FRAGMENT = """
# These are not required under systemd - enable for debugging only
# access_log $LOG_ROOT/$APP/access.log;
# error_log $LOG_ROOT/$APP/error.log;
# Enable gzip compression
gzip on;
gzip_proxied any;
@ -121,7 +120,7 @@ NGINX_COMMON_FRAGMENT = """
gzip_min_length 2048;
gzip_vary on;
gzip_disable "MSIE [1-6]\.(?!.*SV1)";
# set a custom header for requests
add_header X-Deployed-By Piku;
@ -189,12 +188,13 @@ INTERNAL_NGINX_UWSGI_SETTINGS = """
uwsgi_param SERVER_NAME $server_name;
"""
# === Utility functions ===
def sanitize_app_name(app):
    """Sanitize the app name and build matching path.

    Keeps only characters that are safe in a filesystem path component
    (alphanumerics, '.' and '_'), strips trailing whitespace and any
    leading '/'. The scraped span ran this filter twice (pre- and
    post-diff copies of the same line); a single pass is sufficient
    since the operation is idempotent.
    """
    app = "".join(c for c in app if c.isalnum() or c in ('.', '_')).rstrip().lstrip('/')
    return app
@ -210,9 +210,9 @@ def exit_if_invalid(app):
def get_free_port(address=""):
    """Find a free TCP port (entirely at random).

    Binds a throwaway socket to port 0 so the OS assigns an ephemeral
    free port, reads the assigned port back, and releases the socket.
    The scraped span contained two consecutive bind calls (pre- and
    post-diff copies); a second bind on an already-bound socket raises
    OSError, so exactly one bind is performed.

    NOTE(review): inherently racy -- another process may claim the port
    between close() and the caller's own bind.
    """
    s = socket(AF_INET, SOCK_STREAM)
    try:
        s.bind((address, 0))
        port = s.getsockname()[1]
    finally:
        s.close()
    return port
@ -220,7 +220,7 @@ def get_free_port(address=""):
def write_config(filename, bag, separator='='):
"""Helper for writing out config files"""
with open(filename, 'w') as h:
# pylint: disable=unused-variable
for k, v in bag.items():
@ -229,11 +229,11 @@ def write_config(filename, bag, separator='='):
def setup_authorized_keys(ssh_fingerprint, script_path, pubkey):
"""Sets up an authorized_keys file to redirect SSH commands"""
authorized_keys = join(environ['HOME'],'.ssh','authorized_keys')
authorized_keys = join(environ['HOME'], '.ssh', 'authorized_keys')
if not exists(dirname(authorized_keys)):
makedirs(dirname(authorized_keys))
# Restrict features and force all SSH commands to go through our script
# Restrict features and force all SSH commands to go through our script
with open(authorized_keys, 'a') as h:
h.write("""command="FINGERPRINT={ssh_fingerprint:s} NAME=default {script_path:s} $SSH_ORIGINAL_COMMAND",no-agent-forwarding,no-user-rc,no-X11-forwarding,no-port-forwarding {pubkey:s}\n""".format(**locals()))
chmod(dirname(authorized_keys), S_IRUSR | S_IWUSR | S_IXUSR)
@ -242,7 +242,7 @@ def setup_authorized_keys(ssh_fingerprint, script_path, pubkey):
def parse_procfile(filename):
"""Parses a Procfile and returns the worker types. Only one worker of each type is allowed."""
workers = {}
if not exists(filename):
return None
@ -257,22 +257,22 @@ def parse_procfile(filename):
workers[kind] = command
except:
echo("Warning: unrecognized Procfile entry '{}' at line {}".format(line, line_number), fg='yellow')
if not len(workers):
if len(workers) == 0:
return {}
# WSGI trumps regular web workers
if 'wsgi' in workers or 'jwsgi' in workers:
if 'web' in workers:
echo("Warning: found both 'wsgi' and 'web' workers, disabling 'web'", fg='yellow')
del(workers['web'])
return workers
del workers['web']
return workers
def expandvars(buffer, env, default=None, skip_escaped=False):
    """Expand shell-style environment variables in a buffer.

    Supports both ``$NAME`` and ``${NAME}`` forms. Unknown variables are
    left verbatim unless *default* is given, in which case they are
    replaced by *default*. With *skip_escaped* set, occurrences preceded
    by a backslash (``\\$NAME``) are left untouched.
    """
    pattern = (r'(?<!\\)' if skip_escaped else '') + r'\$(\w+|\{([^}]*)\})'

    def _substitute(match):
        # ${NAME} is captured in group 2, bare $NAME in group 1
        name = match.group(2) or match.group(1)
        fallback = match.group(0) if default is None else default
        return env.get(name, fallback)

    return sub(pattern, _substitute, buffer)
@ -288,13 +288,13 @@ def command_output(cmd):
def parse_settings(filename, env={}):
"""Parses a settings file and returns a dict with environment variables"""
if not exists(filename):
return {}
with open(filename, 'r') as settings:
for line in settings:
if '#' == line[0] or len(line.strip()) == 0: # ignore comments and newlines
if line[0] == '#' or len(line.strip()) == 0: # ignore comments and newlines
continue
try:
k, v = map(lambda x: x.strip(), line.split("=", 1))
@ -316,13 +316,16 @@ def check_requirements(binaries):
return False
return True
def found_app(kind):
    """Helper function to output app detected"""
    message = "-----> {} app detected.".format(kind)
    echo(message, fg='green')
    return True
def do_deploy(app, deltas={}, newrev=None):
"""Deploy an app by resetting the work directory"""
app_path = join(APP_ROOT, app)
procfile = join(app_path, 'Procfile')
log_path = join(LOG_ROOT, app)
@ -340,17 +343,18 @@ def do_deploy(app, deltas={}, newrev=None):
if not exists(log_path):
makedirs(log_path)
workers = parse_procfile(procfile)
if workers and len(workers):
if workers and len(workers) > 0:
settings = {}
if exists(join(app_path, 'requirements.txt')) and found_app("Python"):
settings.update(deploy_python(app, deltas))
elif exists(join(app_path, 'package.json')) and found_app("Node") and (check_requirements(['nodejs', 'npm']) or check_requirements(['nodeenv'])):
elif exists(join(app_path, 'package.json')) and found_app("Node") and (
check_requirements(['nodejs', 'npm']) or check_requirements(['nodeenv'])):
settings.update(deploy_node(app, deltas))
elif exists(join(app_path, 'pom.xml')) and found_app("Java Maven") and check_requirements(['java', 'mvn']):
settings.update(deploy_java(app, deltas))
elif exists(join(app_path, 'build.gradle')) and found_app("Java Gradle") and check_requirements(['java', 'gradle']):
settings.update(deploy_java(app, deltas))
elif (exists(join(app_path, 'Godeps')) or len(glob(join(app_path,'*.go')))) and found_app("Go") and check_requirements(['go']):
elif (exists(join(app_path, 'Godeps')) or len(glob(join(app_path, '*.go')))) and found_app("Go") and check_requirements(['go']):
settings.update(deploy_go(app, deltas))
elif exists(join(app_path, 'project.clj')) and found_app("Clojure Lein") and check_requirements(['java', 'lein']):
settings.update(deploy_clojure(app, deltas))
@ -375,8 +379,9 @@ def do_deploy(app, deltas={}, newrev=None):
else:
echo("Error: app '{}' not found.".format(app), fg='red')
def deploy_gradle(app, deltas={}):
"""Deploy a Java application using Gradle"""
"""Deploy a Java application using Gradle"""
java_path = join(ENV_ROOT, app)
build_path = join(APP_ROOT, app, 'build')
env_file = join(APP_ROOT, app, 'ENV')
@ -384,12 +389,12 @@ def deploy_gradle(app, deltas={}):
env = {
'VIRTUAL_ENV': java_path,
"PATH": ':'.join([join(java_path, "bin"), join(app, ".bin"),environ['PATH']])
"PATH": ':'.join([join(java_path, "bin"), join(app, ".bin"), environ['PATH']])
}
if exists(env_file):
env.update(parse_settings(env_file, env))
if not exists(java_path):
makedirs(java_path)
@ -401,9 +406,10 @@ def deploy_gradle(app, deltas={}):
echo("-----> Removing previous builds")
echo("-----> Rebuilding Java Application")
call('gradle clean build', cwd=join(APP_ROOT, app), env=env, shell=True)
return spawn_app(app, deltas)
def deploy_java(app, deltas={}):
"""Deploy a Java application using Maven"""
# TODO: Use jenv to isolate Java Application environments
@ -415,12 +421,12 @@ def deploy_java(app, deltas={}):
env = {
'VIRTUAL_ENV': java_path,
"PATH": ':'.join([join(java_path, "bin"), join(app, ".bin"),environ['PATH']])
"PATH": ':'.join([join(java_path, "bin"), join(app, ".bin"), environ['PATH']])
}
if exists(env_file):
env.update(parse_settings(env_file, env))
if not exists(java_path):
makedirs(java_path)
@ -432,9 +438,10 @@ def deploy_java(app, deltas={}):
echo("-----> Removing previous builds")
echo("-----> Rebuilding Java Application")
call('mvn clean package', cwd=join(APP_ROOT, app), env=env, shell=True)
return spawn_app(app, deltas)
def deploy_clojure(app, deltas={}):
"""Deploy a Clojure Application"""
@ -448,7 +455,7 @@ def deploy_clojure(app, deltas={}):
env = {
'VIRTUAL_ENV': virtual,
"PATH": ':'.join([join(virtual, "bin"), join(app, ".bin"), environ['PATH']]),
"LEIN_HOME": environ.get('LEIN_HOME', join(environ['HOME'],'.lein')),
"LEIN_HOME": environ.get('LEIN_HOME', join(environ['HOME'], '.lein')),
}
if exists(env_file):
env.update(parse_settings(env_file, env))
@ -469,7 +476,7 @@ def deploy_go(app, deltas={}):
if not exists(go_path):
echo("-----> Creating GOPATH for '{}'".format(app), fg='green')
makedirs(go_path)
# copy across a pre-built GOPATH to save provisioning time
# copy across a pre-built GOPATH to save provisioning time
call('cp -a $HOME/gopath {}'.format(app), cwd=ENV_ROOT, shell=True)
first_time = True
@ -505,7 +512,7 @@ def deploy_node(app, deltas={}):
'VIRTUAL_ENV': virtualenv_path,
'NODE_PATH': node_path,
'NPM_CONFIG_PREFIX': abspath(join(node_path, "..")),
"PATH": ':'.join([join(virtualenv_path, "bin"), join(node_path, ".bin"),environ['PATH']])
"PATH": ':'.join([join(virtualenv_path, "bin"), join(node_path, ".bin"), environ['PATH']])
}
if exists(env_file):
env.update(parse_settings(env_file, env))
@ -515,7 +522,8 @@ def deploy_node(app, deltas={}):
version = env.get("NODE_VERSION")
node_binary = join(virtualenv_path, "bin", "node")
installed = check_output("{} -v".format(node_binary), cwd=join(APP_ROOT, app), env=env, shell=True).decode("utf8").rstrip("\n") if exists(node_binary) else ""
installed = check_output("{} -v".format(node_binary), cwd=join(APP_ROOT, app), env=env, shell=True).decode("utf8").rstrip(
"\n") if exists(node_binary) else ""
if version and check_requirements(['nodeenv']):
if not installed.endswith(version):
@ -524,7 +532,8 @@ def deploy_node(app, deltas={}):
echo("Warning: Can't update node with app running. Stop the app & retry.", fg='yellow')
else:
echo("-----> Installing node version '{NODE_VERSION:s}' using nodeenv".format(**env), fg='green')
call("nodeenv --prebuilt --node={NODE_VERSION:s} --clean-src --force {VIRTUAL_ENV:s}".format(**env), cwd=virtualenv_path, env=env, shell=True)
call("nodeenv --prebuilt --node={NODE_VERSION:s} --clean-src --force {VIRTUAL_ENV:s}".format(**env),
cwd=virtualenv_path, env=env, shell=True)
else:
echo("-----> Node is installed at {}.".format(version))
@ -539,7 +548,7 @@ def deploy_node(app, deltas={}):
def deploy_python(app, deltas={}):
"""Deploy a Python application"""
virtualenv_path = join(ENV_ROOT, app)
requirements = join(APP_ROOT, app, 'requirements.txt')
env_file = join(APP_ROOT, app, 'ENV')
@ -559,7 +568,7 @@ def deploy_python(app, deltas={}):
call('virtualenv --python=python{version:d} {app:s}'.format(**locals()), cwd=ENV_ROOT, shell=True)
first_time = True
activation_script = join(virtualenv_path,'bin','activate_this.py')
activation_script = join(virtualenv_path, 'bin', 'activate_this.py')
exec(open(activation_script).read(), dict(__file__=activation_script))
if first_time or getmtime(requirements) > getmtime(virtualenv_path):
@ -577,14 +586,14 @@ def deploy_identity(app, deltas={}):
def spawn_app(app, deltas={}):
"""Create all workers for an app"""
# pylint: disable=unused-variable
app_path = join(APP_ROOT, app)
procfile = join(app_path, 'Procfile')
workers = parse_procfile(procfile)
workers.pop("release", None)
ordinals = defaultdict(lambda:1)
worker_count = {k:1 for k in workers.keys()}
ordinals = defaultdict(lambda: 1)
worker_count = {k: 1 for k in workers.keys()}
# the Python virtualenv
virtualenv_path = join(ENV_ROOT, app)
@ -603,7 +612,7 @@ def spawn_app(app, deltas={}):
'LOG_ROOT': LOG_ROOT,
'HOME': environ['HOME'],
'USER': environ['USER'],
'PATH': ':'.join([join(virtualenv_path,'bin'),environ['PATH']]),
'PATH': ':'.join([join(virtualenv_path, 'bin'), environ['PATH']]),
'PWD': dirname(env_file),
'VIRTUAL_ENV': virtualenv_path,
}
@ -618,7 +627,7 @@ def spawn_app(app, deltas={}):
node_path = join(virtualenv_path, "node_modules")
if exists(node_path):
env["NODE_PATH"] = node_path
env["PATH"] = ':'.join([join(node_path, ".bin"),env['PATH']])
env["PATH"] = ':'.join([join(node_path, ".bin"), env['PATH']])
# Load environment variables shipped with repo (if any)
if exists(env_file):
@ -634,28 +643,28 @@ def spawn_app(app, deltas={}):
env['PORT'] = str(get_free_port())
echo("-----> picking free port {PORT}".format(**env))
# Safe defaults for addressing
# Safe defaults for addressing
for k, v in safe_defaults.items():
if k not in env:
echo("-----> nginx {k:s} set to {v}".format(**locals()))
env[k] = v
# Set up nginx if we have NGINX_SERVER_NAME set
if 'NGINX_SERVER_NAME' in env:
nginx = command_output("nginx -V")
nginx_ssl = "443 ssl"
if "--with-http_v2_module" in nginx:
nginx_ssl += " http2"
elif "--with-http_spdy_module" in nginx and "nginx/1.6.2" not in nginx: # avoid Raspbian bug
elif "--with-http_spdy_module" in nginx and "nginx/1.6.2" not in nginx: # avoid Raspbian bug
nginx_ssl += " spdy"
nginx_conf = join(NGINX_ROOT,"{}.conf".format(app))
env.update({
nginx_conf = join(NGINX_ROOT, "{}.conf".format(app))
env.update({
'NGINX_SSL': nginx_ssl,
'NGINX_ROOT': NGINX_ROOT,
'ACME_WWW': ACME_WWW,
})
# default to reverse proxying to the TCP port we picked
env['INTERNAL_NGINX_UWSGI_SETTINGS'] = 'proxy_pass http://{BIND_ADDRESS:s}:{PORT:s};'.format(**env)
if 'wsgi' in workers or 'jwsgi' in workers:
@ -665,12 +674,11 @@ def spawn_app(app, deltas={}):
if 'PORT' in env:
del env['PORT']
else:
env['NGINX_SOCKET'] = "{BIND_ADDRESS:s}:{PORT:s}".format(**env)
env['NGINX_SOCKET'] = "{BIND_ADDRESS:s}:{PORT:s}".format(**env)
echo("-----> nginx will look for app '{}' on {}".format(app, env['NGINX_SOCKET']))
domain = env['NGINX_SERVER_NAME'].split()[0]
key, crt = [join(NGINX_ROOT, "{}.{}".format(app,x)) for x in ['key','crt']]
domain = env['NGINX_SERVER_NAME'].split()[0]
key, crt = [join(NGINX_ROOT, "{}.{}".format(app, x)) for x in ['key', 'crt']]
if exists(join(ACME_ROOT, "acme.sh")):
acme = ACME_ROOT
www = ACME_WWW
@ -684,7 +692,8 @@ def spawn_app(app, deltas={}):
if not exists(key) or not exists(join(ACME_ROOT, domain, domain + ".key")):
echo("-----> getting letsencrypt certificate")
call('{acme:s}/acme.sh --issue -d {domain:s} -w {www:s}'.format(**locals()), shell=True)
call('{acme:s}/acme.sh --install-cert -d {domain:s} --key-file {key:s} --fullchain-file {crt:s}'.format(**locals()), shell=True)
call('{acme:s}/acme.sh --install-cert -d {domain:s} --key-file {key:s} --fullchain-file {crt:s}'.format(
**locals()), shell=True)
if exists(join(ACME_ROOT, domain)) and not exists(join(ACME_WWW, app)):
symlink(join(ACME_ROOT, domain), join(ACME_WWW, app))
else:
@ -693,8 +702,10 @@ def spawn_app(app, deltas={}):
# fall back to creating self-signed certificate if acme failed
if not exists(key) or stat(crt).st_size == 0:
echo("-----> generating self-signed certificate")
call('openssl req -new -newkey rsa:4096 -days 365 -nodes -x509 -subj "/C=US/ST=NY/L=New York/O=Piku/OU=Self-Signed/CN={domain:s}" -keyout {key:s} -out {crt:s}'.format(**locals()), shell=True)
call(
'openssl req -new -newkey rsa:4096 -days 365 -nodes -x509 -subj "/C=US/ST=NY/L=New York/O=Piku/OU=Self-Signed/CN={domain:s}" -keyout {key:s} -out {crt:s}'.format(
**locals()), shell=True)
# restrict access to server from CloudFlare IP addresses
acl = []
if env.get('NGINX_CLOUDFLARE_ACL', 'false').lower() == 'true':
@ -710,7 +721,7 @@ def spawn_app(app, deltas={}):
remote_ip = environ['SSH_CLIENT'].split()[0]
echo("-----> Adding your IP ({}) to nginx ACL".format(remote_ip))
acl.append("allow {};".format(remote_ip))
acl.extend(["allow 127.0.0.1;","deny all;"])
acl.extend(["allow 127.0.0.1;", "deny all;"])
except Exception:
cf = defaultdict()
echo("-----> Could not retrieve CloudFlare IP ranges: {}".format(format_exc()), fg="red")
@ -722,7 +733,7 @@ def spawn_app(app, deltas={}):
env['INTERNAL_NGINX_STATIC_MAPPINGS'] = ''
# Get a mapping of /url:path1,/url2:path2
static_paths = env.get('NGINX_STATIC_PATHS','')
static_paths = env.get('NGINX_STATIC_PATHS', '')
# prepend static worker path if present
if 'static' in workers:
stripped = workers['static'].strip("/").rstrip("/")
@ -734,19 +745,21 @@ def spawn_app(app, deltas={}):
static_url, static_path = item.split(':')
if static_path[0] != '/':
static_path = join(app_path, static_path)
env['INTERNAL_NGINX_STATIC_MAPPINGS'] = env['INTERNAL_NGINX_STATIC_MAPPINGS'] + expandvars(INTERNAL_NGINX_STATIC_MAPPING, locals())
env['INTERNAL_NGINX_STATIC_MAPPINGS'] = env['INTERNAL_NGINX_STATIC_MAPPINGS'] + expandvars(
INTERNAL_NGINX_STATIC_MAPPING, locals())
except Exception as e:
echo("Error {} in static path spec: should be /url1:path1[,/url2:path2], ignoring.".format(e))
env['INTERNAL_NGINX_STATIC_MAPPINGS'] = ''
env['INTERNAL_NGINX_CUSTOM_CLAUSES'] = expandvars(open(join(app_path, env["NGINX_INCLUDE_FILE"])).read(), env) if env.get("NGINX_INCLUDE_FILE") else ""
env['INTERNAL_NGINX_CUSTOM_CLAUSES'] = expandvars(open(join(app_path, env["NGINX_INCLUDE_FILE"])).read(),
env) if env.get("NGINX_INCLUDE_FILE") else ""
env['INTERNAL_NGINX_PORTMAP'] = ""
if 'web' in workers or 'wsgi' in workers or 'jwsgi' in workers:
env['INTERNAL_NGINX_PORTMAP'] = expandvars(NGINX_PORTMAP_FRAGMENT, env)
env['INTERNAL_NGINX_COMMON'] = expandvars(NGINX_COMMON_FRAGMENT, env)
echo("-----> nginx will map app '{}' to hostname '{}'".format(app, env['NGINX_SERVER_NAME']))
if('NGINX_HTTPS_ONLY' in env) or ('HTTPS_ONLY' in env):
if ('NGINX_HTTPS_ONLY' in env) or ('HTTPS_ONLY' in env):
buffer = expandvars(NGINX_HTTPS_ONLY_TEMPLATE, env)
echo("-----> nginx will redirect all requests to hostname '{}' to HTTPS".format(env['NGINX_SERVER_NAME']))
else:
@ -765,17 +778,17 @@ def spawn_app(app, deltas={}):
# Configured worker count
if exists(scaling):
worker_count.update({k: int(v) for k,v in parse_procfile(scaling).items() if k in workers})
worker_count.update({k: int(v) for k, v in parse_procfile(scaling).items() if k in workers})
to_create = {}
to_destroy = {}
to_destroy = {}
for k, v in worker_count.items():
to_create[k] = range(1,worker_count[k] + 1)
to_create[k] = range(1, worker_count[k] + 1)
if k in deltas and deltas[k]:
to_create[k] = range(1, worker_count[k] + deltas[k] + 1)
if deltas[k] < 0:
to_destroy[k] = range(worker_count[k], worker_count[k] + deltas[k], -1)
worker_count[k] = worker_count[k]+deltas[k]
worker_count[k] = worker_count[k] + deltas[k]
# Cleanup env
for k, v in list(env.items()):
@ -785,7 +798,7 @@ def spawn_app(app, deltas={}):
# Save current settings
write_config(live, env)
write_config(scaling, worker_count, ':')
if env.get("AUTO_RESTART", False):
config = glob(join(UWSGI_ENABLED, '{}*.ini'.format(app)))
if len(config):
@ -800,7 +813,7 @@ def spawn_app(app, deltas={}):
if not exists(enabled):
echo("-----> spawning '{app:s}:{k:s}.{w:d}'".format(**locals()), fg='green')
spawn_worker(app, k, workers[k], env, w)
# Remove unnecessary workers (leave logfiles)
for k, v in to_destroy.items():
for w in v:
@ -810,11 +823,11 @@ def spawn_app(app, deltas={}):
unlink(enabled)
return env
def spawn_worker(app, kind, command, env, ordinal=1):
"""Set up and deploy a single worker of a given kind"""
# pylint: disable=unused-variable
env['PROC_TYPE'] = kind
env_path = join(ENV_ROOT, app)
@ -823,61 +836,60 @@ def spawn_worker(app, kind, command, env, ordinal=1):
log_file = join(LOG_ROOT, app, kind)
settings = [
('chdir', join(APP_ROOT, app)),
('master', 'true'),
('project', app),
('max-requests', env.get('UWSGI_MAX_REQUESTS', '1024')),
('listen', env.get('UWSGI_LISTEN', '16')),
('processes', env.get('UWSGI_PROCESSES', '1')),
('procname-prefix', '{app:s}:{kind:s}'.format(**locals())),
('enable-threads', env.get('UWSGI_ENABLE_THREADS', 'true').lower()),
('chdir', join(APP_ROOT, app)),
('master', 'true'),
('project', app),
('max-requests', env.get('UWSGI_MAX_REQUESTS', '1024')),
('listen', env.get('UWSGI_LISTEN', '16')),
('processes', env.get('UWSGI_PROCESSES', '1')),
('procname-prefix', '{app:s}:{kind:s}'.format(**locals())),
('enable-threads', env.get('UWSGI_ENABLE_THREADS', 'true').lower()),
('log-x-forwarded-for', env.get('UWSGI_LOG_X_FORWARDED_FOR', 'false').lower()),
('log-maxsize', env.get('UWSGI_LOG_MAXSIZE', UWSGI_LOG_MAXSIZE)),
('logto', '{log_file:s}.{ordinal:d}.log'.format(**locals())),
('log-backupname', '{log_file:s}.{ordinal:d}.log.old'.format(**locals())),
('log-maxsize', env.get('UWSGI_LOG_MAXSIZE', UWSGI_LOG_MAXSIZE)),
('logto', '{log_file:s}.{ordinal:d}.log'.format(**locals())),
('log-backupname', '{log_file:s}.{ordinal:d}.log.old'.format(**locals())),
]
# only add virtualenv to uwsgi if it's a real virtualenv
if exists(join(env_path, "bin", "activate_this.py")):
settings.append(('virtualenv', env_path))
if kind== 'jwsgi':
if kind == 'jwsgi':
settings.extend([
('module', command),
('threads', env.get('UWSGI_THREADS','4')),
('threads', env.get('UWSGI_THREADS', '4')),
('plugin', 'jvm'),
('plugin', 'jwsgi')
])
python_version = int(env.get('PYTHON_VERSION','3'))
python_version = int(env.get('PYTHON_VERSION', '3'))
if kind == 'wsgi':
settings.extend([
('module', command),
('threads', env.get('UWSGI_THREADS','4')),
('module', command),
('threads', env.get('UWSGI_THREADS', '4')),
])
if python_version == 2:
settings.extend([
('plugin', 'python'),
('plugin', 'python'),
])
if 'UWSGI_GEVENT' in env:
settings.extend([
('plugin', 'gevent_python'),
('gevent', env['UWSGI_GEVENT']),
('plugin', 'gevent_python'),
('gevent', env['UWSGI_GEVENT']),
])
elif 'UWSGI_ASYNCIO' in env:
settings.extend([
('plugin', 'asyncio_python'),
('plugin', 'asyncio_python'),
])
elif python_version == 3:
settings.extend([
('plugin', 'python3'),
('plugin', 'python3'),
])
if 'UWSGI_ASYNCIO' in env:
settings.extend([
('plugin', 'asyncio_python3'),
('plugin', 'asyncio_python3'),
])
# If running under nginx, don't expose a port at all
if 'NGINX_SERVER_NAME' in env:
@ -890,7 +902,7 @@ def spawn_worker(app, kind, command, env, ordinal=1):
else:
echo("-----> nginx will talk to uWSGI via {BIND_ADDRESS:s}:{PORT:s}".format(**env), fg='yellow')
settings.extend([
('http', '{BIND_ADDRESS:s}:{PORT:s}'.format(**env)),
('http', '{BIND_ADDRESS:s}:{PORT:s}'.format(**env)),
('http-socket', '{BIND_ADDRESS:s}:{PORT:s}'.format(**env)),
])
elif kind == 'web':
@ -900,10 +912,11 @@ def spawn_worker(app, kind, command, env, ordinal=1):
echo("-----> nginx serving static files only".format(**env), fg='yellow')
else:
settings.append(('attach-daemon', command))
if kind in ['wsgi', 'web']:
settings.append(('log-format','%%(addr) - %%(user) [%%(ltime)] "%%(method) %%(uri) %%(proto)" %%(status) %%(size) "%%(referer)" "%%(uagent)" %%(msecs)ms'))
settings.append(('log-format',
'%%(addr) - %%(user) [%%(ltime)] "%%(method) %%(uri) %%(proto)" %%(status) %%(size) "%%(referer)" "%%(uagent)" %%(msecs)ms'))
# remove unnecessary variables from the env in nginx.ini
for k in ['NGINX_ACL']:
if k in env:
@ -923,10 +936,13 @@ def spawn_worker(app, kind, command, env, ordinal=1):
copyfile(available, enabled)
def do_restart(app):
"""Restarts a deployed app"""
config = glob(join(UWSGI_ENABLED, '{}*.ini'.format(app)))
if len(config):
if len(config) > 0:
echo("Restarting app '{}'...".format(app), fg='yellow')
for c in config:
remove(c)
@ -937,7 +953,7 @@ def do_restart(app):
def multi_tail(app, filenames, catch_up=20):
"""Tails multiple log files"""
# Seek helper
def peek(handle):
where = handle.tell()
@ -950,17 +966,17 @@ def multi_tail(app, filenames, catch_up=20):
inodes = {}
files = {}
prefixes = {}
# Set up current state for each log file
for f in filenames:
prefixes[f] = splitext(basename(f))[0]
files[f] = open(f)
inodes[f] = stat(f).st_ino
files[f].seek(0, 2)
longest = max(map(len, prefixes.values()))
# Grab a little history (if any)
# Grab a little history (if any)
for f in filenames:
for line in deque(open(f), catch_up):
yield "{} | {}".format(prefixes[f].ljust(longest), line)
@ -970,12 +986,10 @@ def multi_tail(app, filenames, catch_up=20):
# Check for updates on every file
for f in filenames:
line = peek(files[f])
if not line:
continue
else:
if line:
updated = True
yield "{} | {}".format(prefixes[f].ljust(longest), line)
if not updated:
sleep(1)
# Check if logs rotated
@ -988,15 +1002,17 @@ def multi_tail(app, filenames, catch_up=20):
filenames.remove(f)
# === CLI commands ===
# === CLI commands ===
# Shared click context: accept both -h and --help for every command.
CONTEXT_SETTINGS = {'help_option_names': ['-h', '--help']}


@group(context_settings=CONTEXT_SETTINGS)
def piku():
    """The smallest PaaS you've ever seen"""
    pass
@piku.resultcallback()
def cleanup(ctx):
"""Callback from command execution -- add debugging to taste"""
@ -1008,7 +1024,7 @@ def cleanup(ctx):
@piku.command("apps")
def list_apps():
    """List apps, e.g.: piku apps"""
    for a in listdir(APP_ROOT):
        # an app counts as running when at least one uwsgi ini is enabled for it
        enabled_inis = glob(join(UWSGI_ENABLED, '{}*.ini'.format(a)))
        marker = '*' if enabled_inis else ' '
        echo(marker + a, fg='green')
@ -1018,9 +1034,9 @@ def list_apps():
@argument('app')
def cmd_config(app):
"""Show config, e.g.: piku config <app>"""
app = exit_if_invalid(app)
config_file = join(ENV_ROOT, app, 'ENV')
if exists(config_file):
echo(open(config_file).read().strip(), fg='white')
@ -1033,9 +1049,9 @@ def cmd_config(app):
@argument('setting')
def cmd_config_get(app, setting):
"""e.g.: piku config:get <app> FOO"""
app = exit_if_invalid(app)
config_file = join(ENV_ROOT, app, 'ENV')
if exists(config_file):
env = parse_settings(config_file)
@ -1050,9 +1066,9 @@ def cmd_config_get(app, setting):
@argument('settings', nargs=-1)
def cmd_config_set(app, settings):
"""e.g.: piku config:set <app> FOO=bar BAZ=quux"""
app = exit_if_invalid(app)
config_file = join(ENV_ROOT, app, 'ENV')
env = parse_settings(config_file)
for s in settings:
@ -1072,9 +1088,9 @@ def cmd_config_set(app, settings):
@argument('settings', nargs=-1)
def cmd_config_unset(app, settings):
"""e.g.: piku config:unset <app> FOO"""
app = exit_if_invalid(app)
config_file = join(ENV_ROOT, app, 'ENV')
env = parse_settings(config_file)
for s in settings:
@ -1089,7 +1105,7 @@ def cmd_config_unset(app, settings):
@argument('app')
def cmd_config_live(app):
"""e.g.: piku config:live <app>"""
app = exit_if_invalid(app)
live_config = join(ENV_ROOT, app, 'LIVE_ENV')
@ -1103,7 +1119,7 @@ def cmd_config_live(app):
@argument('app')
def cmd_deploy(app):
"""e.g.: piku deploy <app>"""
app = exit_if_invalid(app)
do_deploy(app)
@ -1112,9 +1128,9 @@ def cmd_deploy(app):
@argument('app')
def cmd_destroy(app):
"""e.g.: piku destroy <app>"""
app = exit_if_invalid(app)
for p in [join(x, app) for x in [APP_ROOT, GIT_ROOT, ENV_ROOT, LOG_ROOT]]:
if exists(p):
echo("Removing folder '{}'".format(p), fg='yellow')
@ -1122,12 +1138,12 @@ def cmd_destroy(app):
for p in [join(x, '{}*.ini'.format(app)) for x in [UWSGI_AVAILABLE, UWSGI_ENABLED]]:
g = glob(p)
if len(g):
if len(g) > 0:
for f in g:
echo("Removing file '{}'".format(f), fg='yellow')
remove(f)
nginx_files = [join(NGINX_ROOT, "{}.{}".format(app,x)) for x in ['conf','sock','key','crt']]
nginx_files = [join(NGINX_ROOT, "{}.{}".format(app, x)) for x in ['conf', 'sock', 'key', 'crt']]
for f in nginx_files:
if exists(f):
echo("Removing file '{}'".format(f), fg='yellow')
@ -1141,17 +1157,17 @@ def cmd_destroy(app):
echo("Removing file '{}'".format(acme_link), fg='yellow')
unlink(acme_link)
@piku.command("logs")
@argument('app')
@argument('process', nargs=1, default='*')
def cmd_logs(app, process):
"""Tail running logs, e.g: piku logs <app> [<process>]"""
app = exit_if_invalid(app)
logfiles = glob(join(LOG_ROOT, app, process + '.*.log'))
if len(logfiles):
if len(logfiles) > 0:
for line in multi_tail(app, logfiles):
echo(line.strip(), fg='white')
else:
@ -1162,7 +1178,7 @@ def cmd_logs(app, process):
@argument('app')
def cmd_ps(app):
"""Show process count, e.g: piku ps <app>"""
app = exit_if_invalid(app)
config_file = join(ENV_ROOT, app, 'SCALING')
@ -1177,16 +1193,16 @@ def cmd_ps(app):
@argument('settings', nargs=-1)
def cmd_ps_scale(app, settings):
"""e.g.: piku ps:scale <app> <proc>=<count>"""
app = exit_if_invalid(app)
config_file = join(ENV_ROOT, app, 'SCALING')
worker_count = {k:int(v) for k, v in parse_procfile(config_file).items()}
worker_count = {k: int(v) for k, v in parse_procfile(config_file).items()}
deltas = {}
for s in settings:
try:
k, v = map(lambda x: x.strip(), s.split("=", 1))
c = int(v) # check for integer value
c = int(v) # check for integer value
if c < 0:
echo("Error: cannot scale type '{}' below 0".format(k), fg='red')
return
@ -1213,14 +1229,15 @@ def cmd_run(app, cmd):
for f in [stdout, stderr]:
fl = fcntl(f, F_GETFL)
fcntl(f, F_SETFL, fl | O_NONBLOCK)
p = Popen(' '.join(cmd), stdin=stdin, stdout=stdout, stderr=stderr, env=environ, cwd=join(APP_ROOT,app), shell=True)
p.communicate()
p = Popen(' '.join(cmd), stdin=stdin, stdout=stdout, stderr=stderr, env=environ, cwd=join(APP_ROOT, app), shell=True)
p.communicate()
@piku.command("restart")
@argument('app')
def cmd_restart(app):
"""Restart an app: piku restart <app>"""
app = exit_if_invalid(app)
do_restart(app)
@ -1230,35 +1247,35 @@ def cmd_restart(app):
def cmd_setup():
"""Initialize environment"""
echo("Running in Python {}".format(".".join(map(str,version_info))))
echo("Running in Python {}".format(".".join(map(str, version_info))))
# Create required paths
for p in [APP_ROOT, GIT_ROOT, ENV_ROOT, UWSGI_ROOT, UWSGI_AVAILABLE, UWSGI_ENABLED, LOG_ROOT, NGINX_ROOT]:
if not exists(p):
echo("Creating '{}'.".format(p), fg='green')
makedirs(p)
# Set up the uWSGI emperor config
settings = [
('chdir', UWSGI_ROOT),
('emperor', UWSGI_ENABLED),
('log-maxsize', UWSGI_LOG_MAXSIZE),
('logto', join(UWSGI_ROOT, 'uwsgi.log')),
('log-backupname', join(UWSGI_ROOT, 'uwsgi.old.log')),
('socket', join(UWSGI_ROOT, 'uwsgi.sock')),
('uid', getpwuid(getuid()).pw_name),
('gid', getgrgid(getgid()).gr_name),
('enable-threads', 'true'),
('threads', '{}'.format(cpu_count() * 2)),
('chdir', UWSGI_ROOT),
('emperor', UWSGI_ENABLED),
('log-maxsize', UWSGI_LOG_MAXSIZE),
('logto', join(UWSGI_ROOT, 'uwsgi.log')),
('log-backupname', join(UWSGI_ROOT, 'uwsgi.old.log')),
('socket', join(UWSGI_ROOT, 'uwsgi.sock')),
('uid', getpwuid(getuid()).pw_name),
('gid', getgrgid(getgid()).gr_name),
('enable-threads', 'true'),
('threads', '{}'.format(cpu_count() * 2)),
]
with open(join(UWSGI_ROOT,'uwsgi.ini'), 'w') as h:
with open(join(UWSGI_ROOT, 'uwsgi.ini'), 'w') as h:
h.write('[uwsgi]\n')
# pylint: disable=unused-variable
for k, v in settings:
h.write("{k:s} = {v}\n".format(**locals()))
# mark this script as executable (in case we were invoked via interpreter)
if not(stat(PIKU_SCRIPT).st_mode & S_IXUSR):
if not (stat(PIKU_SCRIPT).st_mode & S_IXUSR):
echo("Setting '{}' as executable.".format(PIKU_SCRIPT), fg='yellow')
chmod(PIKU_SCRIPT, stat(PIKU_SCRIPT).st_mode | S_IXUSR)
@ -1277,7 +1294,7 @@ def cmd_setup_ssh(public_key_file):
setup_authorized_keys(fingerprint, PIKU_SCRIPT, key)
except Exception:
echo("Error: invalid public key file '{}': {}".format(key_file, format_exc()), fg='red')
elif '-' == public_key_file:
elif public_key_file == '-':
buffer = "".join(stdin.readlines())
with NamedTemporaryFile(mode="w") as f:
f.write(buffer)
@ -1297,25 +1314,25 @@ def cmd_stop(app):
app = exit_if_invalid(app)
config = glob(join(UWSGI_ENABLED, '{}*.ini'.format(app)))
if len(config):
if len(config) > 0:
echo("Stopping app '{}'...".format(app), fg='yellow')
for c in config:
remove(c)
else:
echo("Error: app '{}' not deployed!".format(app), fg='red')
# --- Internal commands ---
@piku.command("git-hook", hidden=True)
@argument('app')
def cmd_git_hook(app):
"""INTERNAL: Post-receive git hook"""
app = sanitize_app_name(app)
repo_path = join(GIT_ROOT, app)
app_path = join(APP_ROOT, app)
for line in stdin:
# pylint: disable=unused-variable
oldrev, newrev, refname = line.strip().split(" ")
@ -1353,7 +1370,7 @@ cat | PIKU_ROOT="{PIKU_ROOT:s}" {PIKU_SCRIPT:s} git-hook {app:s}""".format(**env
@piku.command("git-upload-pack", hidden=True)
@argument('app')
def cmd_git_receive_pack(app):
def cmd_git_upload_pack(app):
"""INTERNAL: Handle git upload pack for an app"""
app = sanitize_app_name(app)
env = globals()