2018-06-14 18:37:53 +00:00
|
|
|
#!/usr/bin/env python3
|
2016-03-26 12:52:54 +00:00
|
|
|
|
2017-05-14 22:34:29 +00:00
|
|
|
"Piku Micro-PaaS"
|
|
|
|
|
2017-05-15 20:37:14 +00:00
|
|
|
from click import argument, command, group, get_current_context, option, secho as echo
|
2016-04-01 23:16:10 +00:00
|
|
|
from collections import defaultdict, deque
|
2016-04-25 10:55:37 +00:00
|
|
|
from datetime import datetime
|
2016-09-10 09:37:37 +00:00
|
|
|
from fcntl import fcntl, F_SETFL, F_GETFL
|
2016-04-01 23:16:10 +00:00
|
|
|
from glob import glob
|
2016-04-25 10:50:37 +00:00
|
|
|
from hashlib import md5
|
2016-05-05 19:00:25 +00:00
|
|
|
from json import loads
|
2016-04-04 08:12:25 +00:00
|
|
|
from multiprocessing import cpu_count
|
2018-06-15 22:41:51 +00:00
|
|
|
from os import chmod, getgid, getuid, unlink, remove, stat, listdir, environ, makedirs, O_NONBLOCK
|
2016-04-03 16:14:15 +00:00
|
|
|
from os.path import abspath, basename, dirname, exists, getmtime, join, realpath, splitext
|
2016-09-10 09:37:37 +00:00
|
|
|
from re import sub
|
2018-06-24 20:54:29 +00:00
|
|
|
from shutil import copyfile, rmtree, which
|
2016-09-10 09:37:37 +00:00
|
|
|
from socket import socket, AF_INET, SOCK_STREAM
|
2017-05-14 22:34:29 +00:00
|
|
|
from sys import argv, stdin, stdout, stderr, version_info
|
2016-09-10 09:37:37 +00:00
|
|
|
from stat import S_IRUSR, S_IWUSR, S_IXUSR
|
|
|
|
from subprocess import call, check_output, Popen, STDOUT, PIPE
|
2017-05-14 22:34:29 +00:00
|
|
|
from tempfile import NamedTemporaryFile
|
|
|
|
from traceback import format_exc
|
2016-03-29 20:57:51 +00:00
|
|
|
from time import sleep
|
2018-06-24 20:54:29 +00:00
|
|
|
from urllib.request import urlopen
|
|
|
|
from pwd import getpwuid
|
|
|
|
from grp import getgrgid
|
|
|
|
|
|
|
|
# === Make sure we can access all system binaries ===

# nginx/uwsgi and friends typically live in sbin directories, which are not on
# PATH for non-login shells (e.g. when piku is invoked via an SSH forced
# command), so append them if missing.
if 'sbin' not in environ['PATH']:
    environ['PATH'] = environ['PATH'] + ":/usr/sbin:/usr/local/sbin"


# === Globals - all tweakable settings are here ===

# Base directory for all piku state (overridable via $PIKU_ROOT).
PIKU_ROOT = environ.get('PIKU_ROOT', join(environ['HOME'],'.piku'))

APP_ROOT = abspath(join(PIKU_ROOT, "apps"))                    # checked-out application working trees
ENV_ROOT = abspath(join(PIKU_ROOT, "envs"))                    # per-app runtime envs plus ENV/LIVE_ENV/SCALING files
GIT_ROOT = abspath(join(PIKU_ROOT, "repos"))                   # git repositories (not referenced in this chunk)
LOG_ROOT = abspath(join(PIKU_ROOT, "logs"))                    # per-app worker log files
NGINX_ROOT = abspath(join(PIKU_ROOT, "nginx"))                 # generated nginx configs, sockets and TLS material
UWSGI_AVAILABLE = abspath(join(PIKU_ROOT, "uwsgi-available"))  # every generated uwsgi ini file
UWSGI_ENABLED = abspath(join(PIKU_ROOT, "uwsgi-enabled"))      # active inis; copying/removing here (re)starts/stops workers
UWSGI_ROOT = abspath(join(PIKU_ROOT, "uwsgi"))                 # uwsgi runtime directory (not referenced in this chunk)
UWSGI_LOG_MAXSIZE = '1048576'                                  # default 'log-maxsize' for workers, in bytes
|
2016-04-25 09:10:07 +00:00
|
|
|
# nginx virtual-host template.  The $UPPERCASE placeholders are filled in by
# expandvars() with values from the app's environment in spawn_app(); nginx
# runtime variables such as $query_string pass through untouched because
# expandvars() leaves unknown names as-is.
NGINX_TEMPLATE = """
upstream $APP {
  server $NGINX_SOCKET;
}
server {
  listen              [::]:80;
  listen              80;

  listen              [::]:$NGINX_SSL;
  listen              $NGINX_SSL;
  ssl                 on;
  ssl_certificate     $NGINX_ROOT/$APP.crt;
  ssl_certificate_key $NGINX_ROOT/$APP.key;
  server_name         $NGINX_SERVER_NAME;

  # These are not required under systemd - enable for debugging only
  # access_log        $LOG_ROOT/$APP/access.log;
  # error_log         $LOG_ROOT/$APP/error.log;

  # Enable gzip compression
  gzip on;
  gzip_proxied any;
  gzip_types text/plain text/xml text/css application/x-javascript text/javascript application/xml+rss application/atom+xml;
  gzip_comp_level 7;
  gzip_min_length 2048;
  gzip_vary on;
  gzip_disable "MSIE [1-6]\.(?!.*SV1)";

  # set a custom header for requests
  add_header X-Deployed-By Piku;

  $INTERNAL_NGINX_STATIC_MAPPINGS

  location    / {
    uwsgi_pass $APP;
    uwsgi_param QUERY_STRING $query_string;
    uwsgi_param REQUEST_METHOD $request_method;
    uwsgi_param CONTENT_TYPE $content_type;
    uwsgi_param CONTENT_LENGTH $content_length;
    uwsgi_param REQUEST_URI $request_uri;
    uwsgi_param PATH_INFO $document_uri;
    uwsgi_param DOCUMENT_ROOT $document_root;
    uwsgi_param SERVER_PROTOCOL $server_protocol;
    uwsgi_param REMOTE_ADDR $remote_addr;
    uwsgi_param REMOTE_PORT $remote_port;
    uwsgi_param SERVER_ADDR $server_addr;
    uwsgi_param SERVER_PORT $server_port;
    uwsgi_param SERVER_NAME $server_name;
    proxy_http_version 1.1;
    proxy_set_header Upgrade $http_upgrade;
    proxy_set_header Connection "upgrade";
    proxy_set_header Host $http_host;
    proxy_set_header X-Forwarded-Proto $scheme;
    proxy_set_header X-Forwarded-For $remote_addr;
    proxy_set_header X-Forwarded-Port $server_port;
    proxy_set_header X-Request-Start $msec;
    $NGINX_ACL
  }
}
"""

# Per-URL static-file stanza, appended into $INTERNAL_NGINX_STATIC_MAPPINGS.
# NOTE: this one uses %-style mapping keys (url, path), not $-placeholders,
# because it is expanded with the % operator in spawn_app(), not expandvars().
INTERNAL_NGINX_STATIC_MAPPING = """
  location %(url)s {
      sendfile on;
      sendfile_max_chunk 1m;
      tcp_nopush on;
      directio 8m;
      aio threads;
      alias %(path)s;
  }
"""
|
|
|
|
|
2016-03-29 20:18:38 +00:00
|
|
|
# === Utility functions ===
|
2016-03-26 12:52:54 +00:00
|
|
|
|
2016-03-26 21:33:02 +00:00
|
|
|
def sanitize_app_name(app):
    """Reduce an app name to alphanumerics, dots and underscores only."""
    keep = lambda ch: ch.isalnum() or ch in ('.', '_')
    cleaned = "".join(filter(keep, app))
    return cleaned.rstrip().lstrip('/')
|
2016-03-26 17:14:13 +00:00
|
|
|
|
|
|
|
|
2016-04-06 09:09:05 +00:00
|
|
|
def exit_if_invalid(app):
    """Sanitize the app name and abort the whole command if no such app exists."""
    app = sanitize_app_name(app)
    if exists(join(APP_ROOT, app)):
        return app
    echo("Error: app '%s' not found." % app, fg='red')
    exit(1)
|
|
|
|
|
|
|
|
|
2016-03-26 12:52:54 +00:00
|
|
|
def get_free_port(address=""):
    """Ask the kernel for a currently-unused TCP port and return its number."""
    # Binding to port 0 makes the OS pick an ephemeral free port for us.
    with socket(AF_INET, SOCK_STREAM) as probe:
        probe.bind((address, 0))
        return probe.getsockname()[1]
|
2016-03-26 17:28:01 +00:00
|
|
|
|
|
|
|
|
2016-04-02 16:38:53 +00:00
|
|
|
def write_config(filename, bag, separator='='):
    """Serialize *bag* to *filename* as one "key<separator>value" line per entry."""
    rendered = ['%s%s%s\n' % (key, separator, value) for key, value in bag.items()]
    with open(filename, 'w') as handle:
        handle.writelines(rendered)
|
2016-04-02 16:38:53 +00:00
|
|
|
|
|
|
|
|
2016-03-26 12:52:54 +00:00
|
|
|
def setup_authorized_keys(ssh_fingerprint, script_path, pubkey):
    """Append a forced-command entry to ~/.ssh/authorized_keys so that all SSH
    logins with *pubkey* are routed through our dispatch script."""
    ssh_dir = join(environ['HOME'], '.ssh')
    authorized_keys = join(ssh_dir, 'authorized_keys')
    if not exists(ssh_dir):
        makedirs(ssh_dir)
    # Restrict features and force all SSH commands to go through our script
    entry = """command="FINGERPRINT=%(ssh_fingerprint)s NAME=default %(script_path)s $SSH_ORIGINAL_COMMAND",no-agent-forwarding,no-user-rc,no-X11-forwarding,no-port-forwarding %(pubkey)s\n""" % locals()
    with open(authorized_keys, 'a') as handle:
        handle.write(entry)
    # Lock directory and file down to the owning user, as sshd requires.
    chmod(ssh_dir, S_IRUSR | S_IWUSR | S_IXUSR)
    chmod(authorized_keys, S_IRUSR | S_IWUSR)
|
2016-04-03 16:14:15 +00:00
|
|
|
|
2016-03-27 12:12:13 +00:00
|
|
|
|
2016-03-29 19:24:17 +00:00
|
|
|
def parse_procfile(filename):
    """Parse a Procfile into a {worker_type: command} dict.

    Only one worker of each type is allowed (later entries overwrite earlier
    ones).  Returns None if the file does not exist and {} if it contains no
    workers.  If both 'wsgi' and 'web' entries are present, 'wsgi' wins.
    """
    if not exists(filename):
        return None
    workers = {}
    with open(filename, 'r') as procfile:
        for line in procfile:
            line = line.strip()
            # Skip blank lines and comments instead of warning about them
            # (the previous bare except treated them as malformed entries).
            if not line or line.startswith('#'):
                continue
            try:
                kind, command = (token.strip() for token in line.split(":", 1))
                workers[kind] = command
            except ValueError:
                # No ':' separator at all - genuinely malformed.
                echo("Warning: unrecognized Procfile entry '%s'" % line, fg='yellow')
    if not workers:
        return {}
    # WSGI trumps regular web workers
    if 'wsgi' in workers and 'web' in workers:
        del workers['web']
    return workers
|
2016-03-31 22:39:29 +00:00
|
|
|
|
|
|
|
|
2016-04-25 09:24:35 +00:00
|
|
|
def expandvars(buffer, env, default=None, skip_escaped=False):
    """Expand shell-style $VAR / ${VAR} references in *buffer* using *env*.

    Unknown variables are left untouched unless *default* is given, in which
    case they are replaced by it.  With *skip_escaped*, references preceded
    by a backslash are not expanded.
    """
    def substitute(match):
        name = match.group(2) or match.group(1)
        if default is None:
            fallback = match.group(0)  # leave the reference as-is
        else:
            fallback = default
        return env.get(name, fallback)

    prefix = r'(?<!\\)' if skip_escaped else ''
    return sub(prefix + r'\$(\w+|\{([^}]*)\})', substitute, buffer)
|
2016-04-25 09:24:35 +00:00
|
|
|
|
|
|
|
|
2016-04-25 12:23:00 +00:00
|
|
|
def command_output(cmd):
    """Run *cmd* through the shell and return its combined stdout+stderr as a
    string, or "" if the command fails for any reason."""
    try:
        # check_output() returns bytes; the old str() wrapper produced the
        # literal "b'...'" representation, so decode properly instead.
        return check_output(cmd, stderr=STDOUT, env=environ, shell=True).decode("utf8", errors="replace")
    except Exception:
        # Best-effort helper: callers only probe the output for substrings,
        # so any failure (missing binary, non-zero exit) maps to "".
        return ""
|
|
|
|
|
|
|
|
|
2016-04-01 23:51:49 +00:00
|
|
|
def parse_settings(filename, env=None):
    """Parse a KEY=VALUE settings file, expanding $-references against the
    entries accumulated so far, and return the resulting dict.

    If *env* is given it is updated in place and returned.  A malformed line
    (no '=') causes the whole file to be ignored (returns {}).
    """
    # The previous signature used a mutable default (env={}), which leaks
    # state between calls that omit the argument; use None as the sentinel.
    if env is None:
        env = {}
    if not exists(filename):
        return {}
    with open(filename, 'r') as settings:
        for line in settings:
            line = line.strip()
            # Skip blanks and comments; previously a blank line fell into the
            # bare except below and silently discarded the entire file.
            if not line or line.startswith('#'):
                continue
            try:
                k, v = (token.strip() for token in line.split("=", 1))
                env[k] = expandvars(v, env)
            except ValueError:
                echo("Error: malformed setting '%s', ignoring file." % line, fg='red')
                return {}
    return env
|
2018-06-24 20:54:29 +00:00
|
|
|
|
|
|
|
|
|
|
|
def check_requirements(binaries):
    """Return True only if every binary in *binaries* can be found on PATH."""
    echo("-----> Checking requirements: %s" % str(binaries), fg='green')
    resolved = [which(binary) for binary in binaries]
    echo(str(resolved))
    return None not in resolved
|
2016-03-29 19:24:17 +00:00
|
|
|
|
2016-03-27 12:12:13 +00:00
|
|
|
|
2016-04-06 10:59:42 +00:00
|
|
|
def do_deploy(app, deltas={}):
    """Deploy an app by resetting the work directory.

    Pulls/checks out the app's working tree, then dispatches to the
    runtime-specific deploy_* helper based on marker files in the repo.
    *deltas* (worker-count adjustments) is passed through to spawn_app()
    via the helpers.
    NOTE(review): mutable default argument; harmless here since deltas is
    never mutated, but worth normalizing across the deploy functions.
    """
    app_path = join(APP_ROOT, app)
    procfile = join(app_path, 'Procfile')
    log_path = join(LOG_ROOT, app)

    # GIT_WORK_DIR steers git at the checked-out tree for pull/checkout.
    env = {'GIT_WORK_DIR': app_path}
    if exists(app_path):
        echo("-----> Deploying app '%s'" % app, fg='green')
        # Hard-reset the working copy to the latest pushed state.
        call('git pull --quiet', cwd=app_path, env=env, shell=True)
        call('git checkout -f', cwd=app_path, env=env, shell=True)
        if not exists(log_path):
            makedirs(log_path)
        workers = parse_procfile(procfile)
        if workers and len(workers):
            # Runtime detection by marker file, in priority order.
            if exists(join(app_path, 'requirements.txt')):
                echo("-----> Python app detected.", fg='green')
                deploy_python(app, deltas)
            elif exists(join(app_path, 'package.json')) and check_requirements(['nodeenv', 'node', 'npm']):
                echo("-----> Node app detected.", fg='green')
                deploy_node(app, deltas)
            elif exists(join(app_path, 'pom.xml')) and check_requirements(['java', 'mvn']):
                echo("-----> Java app detected.", fg='green')
                # NOTE(review): deploy_java is not defined in this chunk -
                # presumably elsewhere in the file; verify it exists.
                deploy_java(app, deltas)
            elif (exists(join(app_path, 'Godeps')) or len(glob(join(app_path,'*.go')))) and check_requirements(['go']):
                echo("-----> Go app detected.", fg='green')
                deploy_go(app, deltas)
            else:
                echo("-----> Could not detect runtime!", fg='red')
            # TODO: detect other runtimes
        else:
            echo("Error: Invalid Procfile for app '%s'." % app, fg='red')
    else:
        echo("Error: app '%s' not found." % app, fg='red')
|
2016-03-28 17:40:52 +00:00
|
|
|
|
|
|
|
|
2016-04-06 22:56:49 +00:00
|
|
|
def deploy_go(app, deltas={}):
    """Deploy a Go application: provision a GOPATH, refresh Godeps, spawn."""
    go_path = join(ENV_ROOT, app)
    deps = join(APP_ROOT, app, 'Godeps')

    first_time = False
    if not exists(go_path):
        echo("-----> Creating GOPATH for '%s'" % app, fg='green')
        makedirs(go_path)
        # copy across a pre-built GOPATH to save provisioning time
        call('cp -a $HOME/gopath %s' % app, cwd=ENV_ROOT, shell=True)
        first_time = True

    if exists(deps):
        # Only re-run godep when deps changed since the env was last touched.
        if first_time or getmtime(deps) > getmtime(go_path):
            echo("-----> Running godep for '%s'" % app, fg='green')
            env = {
                # NOTE(review): these '$HOME'/'$PATH' values are passed as
                # literal environment-variable *values*; the shell expands
                # variables in the command string, not in env values, so
                # GOPATH/GOROOT likely end up containing the literal text
                # '$HOME/...' - confirm intent.
                'GOPATH': '$HOME/gopath',
                'GOROOT': '$HOME/go',
                'PATH': '$PATH:$HOME/go/bin',
                'GO15VENDOREXPERIMENT': '1'
            }
            call('godep update ...', cwd=join(APP_ROOT, app), env=env, shell=True)
    spawn_app(app, deltas)
|
|
|
|
|
|
|
|
|
2018-06-24 20:54:29 +00:00
|
|
|
def deploy_node(app, deltas={}):
    """Deploy a Node application: provision a nodeenv, run npm, spawn workers."""
    node_path = join(ENV_ROOT, app)
    deps = join(APP_ROOT, app, 'node_modules')

    first_time = False
    if not exists(deps):
        echo("-----> Creating nodeenv for '%s'" % app, fg='green')
        makedirs(node_path)
        call('nodeenv --node=system %s' % app, cwd=ENV_ROOT, shell=True)
        first_time = True

    if exists(deps):
        # Fixed: this previously compared against the undefined name 'go_path'
        # (copy/paste from deploy_go), which raised NameError at runtime.
        if first_time or getmtime(deps) > getmtime(node_path):
            echo("-----> Running npm for '%s'" % app, fg='green')
            env = {
                'NODE_PATH': '%s/lib/node_modules' % node_path,
                'NPM_CONFIG_PREFIX': node_path,
                'PATH': '%s:$PATH' % node_path
            }
            # NOTE(review): on a first deploy node_modules does not exist yet,
            # so this branch is skipped and npm never runs - confirm whether
            # the outer condition should be on the env instead.
            call('npm install', cwd=join(APP_ROOT, app), env=env, shell=True)
    spawn_app(app, deltas)
|
|
|
|
|
|
|
|
|
2016-04-06 10:59:42 +00:00
|
|
|
def deploy_python(app, deltas={}):
    """Deploy a Python application: create/refresh its virtualenv, run pip,
    then hand off to spawn_app() to (re)start workers."""
    virtualenv_path = join(ENV_ROOT, app)
    requirements = join(APP_ROOT, app, 'requirements.txt')
    env_file = join(APP_ROOT, app, 'ENV')
    # Peek at environment variables shipped with repo (if any) to determine version
    env = {}
    if exists(env_file):
        env.update(parse_settings(env_file, env))

    # PYTHON_VERSION in the app's ENV selects the interpreter major version.
    version = int(env.get("PYTHON_VERSION", "3"))

    first_time = False
    if not exists(virtualenv_path):
        echo("-----> Creating virtualenv for '%s'" % app, fg='green')
        makedirs(virtualenv_path)
        call('virtualenv --python=python%d %s' % (version, app), cwd=ENV_ROOT, shell=True)
        first_time = True

    # Activate the app's virtualenv inside THIS process so the pip call below
    # (and anything downstream) runs against the app's environment.
    activation_script = join(virtualenv_path,'bin','activate_this.py')
    exec(open(activation_script).read(), dict(__file__=activation_script))

    # Only reinstall deps when requirements.txt is newer than the env.
    if first_time or getmtime(requirements) > getmtime(virtualenv_path):
        echo("-----> Running pip for '%s'" % app, fg='green')
        call('pip install -r %s' % requirements, cwd=virtualenv_path, shell=True)
    spawn_app(app, deltas)
|
2016-03-31 22:26:52 +00:00
|
|
|
|
|
|
|
|
2016-04-02 21:32:10 +00:00
|
|
|
def spawn_app(app, deltas={}):
    """Create all workers for an app.

    Builds the app's runtime environment (bootstrap values + repo ENV +
    operator overrides), sets up nginx when NGINX_SERVER_NAME is present,
    applies scaling *deltas* to the per-worker-type counts, persists the
    LIVE_ENV/SCALING files, and creates/removes uwsgi ini files accordingly.
    """
    app_path = join(APP_ROOT, app)
    procfile = join(app_path, 'Procfile')
    workers = parse_procfile(procfile)
    # NOTE(review): 'ordinals' is never used below - leftover?
    ordinals = defaultdict(lambda:1)
    # Default to one worker per declared type.
    worker_count = {k:1 for k in workers.keys()}

    # the Python virtualenv
    virtualenv_path = join(ENV_ROOT, app)
    # Settings shipped with the app
    env_file = join(APP_ROOT, app, 'ENV')
    # Custom overrides
    settings = join(ENV_ROOT, app, 'ENV')
    # Live settings
    live = join(ENV_ROOT, app, 'LIVE_ENV')
    # Scaling
    scaling = join(ENV_ROOT, app, 'SCALING')

    # Bootstrap environment
    env = {
        'APP': app,
        'LOG_ROOT': LOG_ROOT,
        'HOME': environ['HOME'],
        'USER': environ['USER'],
        'PATH': environ['PATH'],
        'PWD': dirname(env_file),
        'VIRTUAL_ENV': virtualenv_path,
    }

    # Load environment variables shipped with repo (if any)
    if exists(env_file):
        env.update(parse_settings(env_file, env))

    # Override with custom settings (if any)
    if exists(settings):
        env.update(parse_settings(settings, env))

    if 'web' in workers or 'wsgi' in workers:
        # Pick a port if none defined and we're not running under nginx
        if 'PORT' not in env and 'NGINX_SERVER_NAME' not in env:
            env['PORT'] = str(get_free_port())

        # Safe default for bind address
        if 'BIND_ADDRESS' not in env:
            env['BIND_ADDRESS'] = '127.0.0.1'

        # Set up nginx if we have NGINX_SERVER_NAME set
        if 'NGINX_SERVER_NAME' in env:
            # Probe nginx's compile-time flags to pick the best TLS listener.
            nginx = command_output("nginx -V")
            nginx_ssl = "443 ssl"
            if "--with-http_v2_module" in nginx:
                nginx_ssl += " http2"
            elif "--with-http_spdy_module" in nginx and "nginx/1.6.2" not in nginx: # avoid Raspbian bug
                nginx_ssl += " spdy"

            env.update({
                'NGINX_SSL': nginx_ssl,
                'NGINX_ROOT': NGINX_ROOT,
            })

            if 'wsgi' in workers:
                # wsgi workers talk to nginx over a unix socket, no TCP port.
                sock = join(NGINX_ROOT, "%s.sock" % app)
                env['NGINX_SOCKET'] = env['BIND_ADDRESS'] = "unix://" + sock
                if 'PORT' in env:
                    del env['PORT']
            else:
                env['NGINX_SOCKET'] = "%(BIND_ADDRESS)s:%(PORT)s" % env

            # Generate a self-signed certificate on first setup.
            domain = env['NGINX_SERVER_NAME'].split()[0]
            key, crt = [join(NGINX_ROOT,'%s.%s' % (app,x)) for x in ['key','crt']]
            if not exists(key):
                call('openssl req -new -newkey rsa:4096 -days 365 -nodes -x509 -subj "/C=US/ST=NY/L=New York/O=Piku/OU=Self-Signed/CN=%(domain)s" -keyout %(key)s -out %(crt)s' % locals(), shell=True)

            # restrict access to server from CloudFlare IP addresses
            acl = []
            if env.get('NGINX_CLOUDFLARE_ACL', 'false').lower() == 'true':
                try:
                    cf = loads(urlopen('https://api.cloudflare.com/client/v4/ips').read())
                except Exception as e:
                    # NOTE(review): exceptions have no '.text' attribute, and
                    # defaultdict() without a factory raises KeyError on the
                    # 'success' lookup below - this fallback path looks broken.
                    cf = defaultdict()
                    echo("-----> Could not retrieve CloudFlare IP ranges: %s" % e.text, fg="red")
                if cf['success'] == True:
                    for i in cf['result']['ipv4_cidrs']:
                        acl.append("allow %s;" % i)
                    for i in cf['result']['ipv6_cidrs']:
                        acl.append("allow %s;" % i)
                    # allow access from controlling machine
                    if 'SSH_CLIENT' in environ:
                        remote_ip = environ['SSH_CLIENT'].split()[0]
                        echo("-----> Adding your IP (%s) to nginx ACL" % remote_ip)
                        acl.append("allow %s;" % remote_ip)
                    acl.extend(["allow 127.0.0.1;","deny all;"])
            env['NGINX_ACL'] = " ".join(acl)

            env['INTERNAL_NGINX_STATIC_MAPPINGS'] = ''

            # Get a mapping of /url:path1,/url2:path2
            static_paths = env.get('NGINX_STATIC_PATHS','')
            if len(static_paths):
                try:
                    items = static_paths.split(',')
                    for item in items:
                        static_url, static_path = item.split(':')
                        # Relative paths are resolved against the app tree.
                        if static_path[0] != '/':
                            static_path = join(app_path, static_path)
                        env['INTERNAL_NGINX_STATIC_MAPPINGS'] = env['INTERNAL_NGINX_STATIC_MAPPINGS'] + INTERNAL_NGINX_STATIC_MAPPING % {'url': static_url, 'path': static_path}
                except Exception as e:
                    echo("Error %s in static path spec: should be /url1:path1[,/url2:path2], ignoring." % e)
                    env['INTERNAL_NGINX_STATIC_MAPPINGS'] = ''

            # Render and install the nginx vhost for this app.
            buffer = expandvars(NGINX_TEMPLATE, env)
            echo("-----> Setting up nginx for '%s:%s'" % (app, env['NGINX_SERVER_NAME']))
            with open(join(NGINX_ROOT,"%s.conf" % app), "w") as h:
                h.write(buffer)

    # Configured worker count
    if exists(scaling):
        worker_count.update({k: int(v) for k,v in parse_procfile(scaling).items()})

    # Apply deltas: compute which ordinals to create and which to tear down.
    to_create = {}
    to_destroy = {}
    for k, v in worker_count.items():
        to_create[k] = range(1,worker_count[k] + 1)
        if k in deltas and deltas[k]:
            to_create[k] = range(1, worker_count[k] + deltas[k] + 1)
            if deltas[k] < 0:
                # Count down from the current highest ordinal.
                to_destroy[k] = range(worker_count[k], worker_count[k] + deltas[k], -1)
            worker_count[k] = worker_count[k]+deltas[k]

    # Cleanup env: INTERNAL_* keys are template plumbing, not app settings.
    for k, v in list(env.items()):
        if k.startswith('INTERNAL_'):
            del env[k]

    # Save current settings
    write_config(live, env)
    write_config(scaling, worker_count, ':')

    # Create new workers
    for k, v in to_create.items():
        for w in v:
            enabled = join(UWSGI_ENABLED, '%s_%s.%d.ini' % (app, k, w))
            if not exists(enabled):
                echo("-----> Spawning '%s:%s.%d'" % (app, k, w), fg='green')
                spawn_worker(app, k, workers[k], env, w)

    # Remove unnecessary workers (leave logfiles)
    for k, v in to_destroy.items():
        for w in v:
            enabled = join(UWSGI_ENABLED, '%s_%s.%d.ini' % (app, k, w))
            if exists(enabled):
                echo("-----> Terminating '%s:%s.%d'" % (app, k, w), fg='yellow')
                unlink(enabled)
|
2016-04-02 21:32:10 +00:00
|
|
|
|
2016-03-31 22:26:52 +00:00
|
|
|
|
2016-04-02 21:32:10 +00:00
|
|
|
def spawn_worker(app, kind, command, env, ordinal=1):
    """Set up and deploy a single worker of a given kind.

    Assembles a uwsgi ini for worker *ordinal* of type *kind*, writes it to
    UWSGI_AVAILABLE and copies it to UWSGI_ENABLED (which is what actually
    starts the worker - presumably via a uwsgi emperor watching that
    directory; confirm against the installation docs).
    """
    env['PROC_TYPE'] = kind
    env_path = join(ENV_ROOT, app)
    available = join(UWSGI_AVAILABLE, '%s_%s.%d.ini' % (app, kind, ordinal))
    enabled = join(UWSGI_ENABLED, '%s_%s.%d.ini' % (app, kind, ordinal))

    # Base uwsgi settings common to every worker type; UWSGI_* keys in the
    # app env override the defaults.
    settings = [
        ('virtualenv', join(ENV_ROOT, app)),
        ('chdir', join(APP_ROOT, app)),
        ('master', 'true'),
        ('project', app),
        ('max-requests', env.get('UWSGI_MAX_REQUESTS', '1024')),
        ('listen', env.get('UWSGI_LISTEN', '16')),
        ('processes', env.get('UWSGI_PROCESSES', '1')),
        ('procname-prefix', '%s:%s:' % (app, kind)),
        ('enable-threads', env.get('UWSGI_ENABLE_THREADS', 'true').lower()),
        ('log-x-forwarded-for', env.get('UWSGI_LOG_X_FORWARDED_FOR', 'false').lower()),
        ('log-maxsize', env.get('UWSGI_LOG_MAXSIZE', UWSGI_LOG_MAXSIZE)),
        ('logto', '%s.%d.log' % (join(LOG_ROOT, app, kind), ordinal)),
        ('log-backupname', '%s.%d.log.old' % (join(LOG_ROOT, app, kind), ordinal)),
    ]

    python_version = int(env.get('PYTHON_VERSION','3'))

    if kind == 'wsgi':
        # wsgi workers load the app as a Python module inside uwsgi.
        settings.extend([
            ('module', command),
            ('threads', env.get('UWSGI_THREADS','4')),
        ])
        # Pick the matching uwsgi plugin(s) for the interpreter version and
        # optional async engine (gevent on py2, asyncio on either).
        if python_version == 2:
            settings.extend([
                ('plugin', 'python'),
            ])
            if 'UWSGI_GEVENT' in env:
                settings.extend([
                    ('plugin', 'gevent_python'),
                    ('gevent', env['UWSGI_GEVENT']),
                ])
            elif 'UWSGI_ASYNCIO' in env:
                settings.extend([
                    ('plugin', 'asyncio_python'),
                ])
        elif python_version == 3:
            settings.extend([
                ('plugin', 'python3'),
            ])
            if 'UWSGI_ASYNCIO' in env:
                settings.extend([
                    ('plugin', 'asyncio_python3'),
                ])

        # If running under nginx, don't expose a port at all
        if 'NGINX_SERVER_NAME' in env:
            sock = join(NGINX_ROOT, "%s.sock" % app)
            echo("-----> Binding uWSGI to %s" % sock , fg='yellow')
            settings.extend([
                ('socket', sock),
                ('chmod-socket', '664'),
            ])
        else:
            echo("-----> Setting HTTP to listen on %(BIND_ADDRESS)s:%(PORT)s" % env, fg='yellow')
            settings.extend([
                ('http', '%(BIND_ADDRESS)s:%(PORT)s' % env),
                ('http-socket', '%(BIND_ADDRESS)s:%(PORT)s' % env),
            ])
    elif kind == 'web':
        # 'web' workers run their own HTTP server behind uwsgi's http router.
        echo("-----> Setting HTTP to listen on %(BIND_ADDRESS)s:%(PORT)s" % env, fg='yellow')
        settings.extend([
            ('http', '%(BIND_ADDRESS)s:%(PORT)s' % env),
            ('http-socket', '%(BIND_ADDRESS)s:%(PORT)s' % env),
        ])
    else:
        # Any other worker type is just a supervised external process.
        settings.append(('attach-daemon', command))

    if kind in ['wsgi','web']:
        # %% escapes are needed because uwsgi itself expands %(var) in inis.
        settings.append(('log-format','%%(addr) - %%(user) [%%(ltime)] "%%(method) %%(uri) %%(proto)" %%(status) %%(size) "%%(referer)" "%%(uagent)" %%(msecs)ms'))

    # remove unnecessary variables from the env in nginx.ini
    # NOTE(review): this mutates the caller's env dict.
    for k in ['NGINX_ACL']:
        if k in env:
            del env[k]

    # Every remaining env entry is exported to the worker process.
    for k, v in env.items():
        settings.append(('env', '%s=%s' % (k,v)))

    with open(available, 'w') as h:
        h.write('[uwsgi]\n')
        for k, v in settings:
            h.write("%s = %s\n" % (k, v))

    # Activating the ini is a copy into the enabled directory.
    copyfile(available, enabled)
|
2016-04-01 22:26:37 +00:00
|
|
|
|
|
|
|
|
2016-04-03 10:35:39 +00:00
|
|
|
def multi_tail(app, filenames, catch_up=20):
    """Tail multiple log files at once, yielding prefix-tagged lines.

    First yields the last *catch_up* lines of each file, then polls the files
    forever, yielding new lines as they appear.  Rotated files (inode change)
    are reopened; files that disappear are dropped from the watch set.
    """
    # Seek helper: return the next complete line, or None (after rewinding)
    # if nothing new has been written yet.
    def peek(handle):
        where = handle.tell()
        line = handle.readline()
        if not line:
            handle.seek(where)
            return None
        return line

    inodes = {}
    files = {}
    prefixes = {}

    # Set up current state for each log file
    for f in filenames:
        prefixes[f] = splitext(basename(f))[0]
        files[f] = open(f)
        inodes[f] = stat(f).st_ino
        files[f].seek(0, 2)  # tail from the current end of file

    longest = max(map(len, prefixes.values()))

    # Grab a little history (if any)
    for f in filenames:
        for line in deque(open(f), catch_up):
            yield "%s | %s" % (prefixes[f].ljust(longest), line)

    while True:
        updated = False
        # Check for updates on every file
        for f in filenames:
            line = peek(files[f])
            if not line:
                continue
            else:
                updated = True
                yield "%s | %s" % (prefixes[f].ljust(longest), line)

        if not updated:
            sleep(1)
            # Check if logs rotated.  Iterate over a snapshot: the original
            # called filenames.remove(f) while iterating the same list, which
            # silently skips the element after each removal.
            for f in list(filenames):
                if exists(f):
                    if stat(f).st_ino != inodes[f]:
                        files[f] = open(f)
                        inodes[f] = stat(f).st_ino
                else:
                    filenames.remove(f)
|
2016-04-01 22:26:37 +00:00
|
|
|
|
2016-03-27 12:12:13 +00:00
|
|
|
|
2016-03-29 20:18:38 +00:00
|
|
|
# === CLI commands ===
|
2016-03-26 12:52:54 +00:00
|
|
|
|
2016-03-27 12:08:30 +00:00
|
|
|
@group()
def piku():
    """The smallest PaaS you've ever seen"""
    # Top-level click command group; all subcommands attach via @piku.command().
    pass
|
2016-03-26 22:47:39 +00:00
|
|
|
|
2016-03-26 12:52:54 +00:00
|
|
|
|
2016-03-26 17:28:01 +00:00
|
|
|
@piku.resultcallback()
def cleanup(ctx):
    """Callback from command execution -- add debugging to taste"""
    # Invoked by click after any piku subcommand finishes; currently a no-op.
    pass
|
2016-03-26 12:52:54 +00:00
|
|
|
|
2016-03-26 22:47:39 +00:00
|
|
|
|
2016-03-29 20:18:38 +00:00
|
|
|
# --- User commands ---
|
2016-03-26 12:52:54 +00:00
|
|
|
|
2016-04-06 21:46:32 +00:00
|
|
|
@piku.command("apps")
def list_apps():
    """List applications"""

    # Each subdirectory of APP_ROOT is a deployed application. Sort the
    # names: listdir() order is filesystem-dependent, so unsorted output
    # was nondeterministic.
    for a in sorted(listdir(APP_ROOT)):
        echo(a, fg='green')
|
|
|
|
|
|
|
|
|
2016-04-02 15:46:03 +00:00
|
|
|
@piku.command("config")
@argument('app')
def deploy_app(app):
    """Show application configuration"""
    # NOTE(review): several commands in this module reuse the name
    # deploy_app; click registers each under its command string, but the
    # module-level attribute is shadowed -- consider distinct names.

    app = exit_if_invalid(app)

    config_file = join(ENV_ROOT, app, 'ENV')
    if exists(config_file):
        # Context manager ensures the handle is closed deterministically
        # (the original relied on refcounting to close it).
        with open(config_file) as f:
            echo(f.read().strip(), fg='white')
    else:
        echo("Warning: app '%s' not deployed, no config found." % app, fg='yellow')
|
2016-04-02 15:46:03 +00:00
|
|
|
|
|
|
|
|
|
|
|
@piku.command("config:get")
@argument('app')
@argument('setting')
def deploy_app(app, setting):
    """Retrieve a configuration setting"""

    app = exit_if_invalid(app)

    config_file = join(ENV_ROOT, app, 'ENV')
    if exists(config_file):
        env = parse_settings(config_file)
        # Silently prints nothing if the setting is absent, matching the
        # original behavior.
        if setting in env:
            echo("%s" % env[setting], fg='white')
    else:
        # Added fg='yellow' for consistency with the other warning messages.
        echo("Warning: no active configuration for '%s'" % app, fg='yellow')
|
2016-04-02 15:46:03 +00:00
|
|
|
|
|
|
|
|
|
|
|
@piku.command("config:set")
@argument('app')
@argument('settings', nargs=-1)
def deploy_app(app, settings):
    """Set a configuration setting"""

    app = exit_if_invalid(app)

    config_file = join(ENV_ROOT, app, 'ENV')
    env = parse_settings(config_file)
    for s in settings:
        try:
            # Split on the first '=' only, so values may themselves contain '='.
            k, v = map(lambda x: x.strip(), s.split("=", 1))
            env[k] = v
            echo("Setting %s=%s for '%s'" % (k, v, app), fg='white')
        except ValueError:
            # Narrowed from a bare except: only a missing '=' (tuple
            # unpack failure) makes a setting malformed.
            echo("Error: malformed setting '%s'" % s, fg='red')
            return
    write_config(config_file, env)
    do_deploy(app)
|
|
|
|
|
|
|
|
|
2016-05-03 21:45:58 +00:00
|
|
|
@piku.command("config:unset")
@argument('app')
@argument('settings', nargs=-1)
def deploy_app(app, settings):
    """Unset a configuration setting"""
    # Docstring fixed: it previously said "Set a configuration setting",
    # copy-pasted from config:set.

    app = exit_if_invalid(app)

    config_file = join(ENV_ROOT, app, 'ENV')
    env = parse_settings(config_file)
    for s in settings:
        if s in env:
            del env[s]
            echo("Unsetting %s for '%s'" % (s, app), fg='white')
    write_config(config_file, env)
    do_deploy(app)
|
|
|
|
|
|
|
|
|
2016-04-02 15:46:03 +00:00
|
|
|
@piku.command("config:live")
@argument('app')
def deploy_app(app):
    """Show live configuration settings"""

    app = exit_if_invalid(app)

    live_config = join(ENV_ROOT, app, 'LIVE_ENV')
    if exists(live_config):
        # Context manager closes the handle deterministically (the
        # original left it to the garbage collector).
        with open(live_config) as f:
            echo(f.read().strip(), fg='white')
    else:
        echo("Warning: app '%s' not deployed, no config found." % app, fg='yellow')
|
2016-04-02 15:46:03 +00:00
|
|
|
|
|
|
|
|
2016-03-26 22:08:10 +00:00
|
|
|
@piku.command("deploy")
@argument('app')
def deploy_app(app):
    """Deploy an application"""

    # Validate the app name (exits the process on failure), then hand
    # straight off to the deployer.
    do_deploy(exit_if_invalid(app))
|
2016-03-26 22:47:39 +00:00
|
|
|
|
|
|
|
|
2016-03-29 20:18:38 +00:00
|
|
|
@piku.command("destroy")
@argument('app')
def destroy_app(app):
    """Destroy an application"""

    app = exit_if_invalid(app)

    # Wipe the per-app directory under each managed root.
    for root in [APP_ROOT, GIT_ROOT, ENV_ROOT, LOG_ROOT]:
        folder = join(root, app)
        if exists(folder):
            echo("Removing folder '%s'" % folder, fg='yellow')
            rmtree(folder)

    # Remove every uWSGI config for the app (available and enabled).
    for pattern in [join(x, '%s*.ini' % app) for x in [UWSGI_AVAILABLE, UWSGI_ENABLED]]:
        for ini in glob(pattern):
            echo("Removing file '%s'" % ini, fg='yellow')
            remove(ini)

    # Remove nginx artifacts (config, socket, TLS key/cert) if present.
    for nginx_file in [join(NGINX_ROOT, "%s.%s" % (app,x)) for x in ['conf','sock','key','crt']]:
        if exists(nginx_file):
            echo("Removing file '%s'" % nginx_file, fg='yellow')
            remove(nginx_file)
|
2016-03-26 22:47:39 +00:00
|
|
|
|
2016-04-01 21:53:08 +00:00
|
|
|
|
2016-04-06 21:46:32 +00:00
|
|
|
@piku.command("logs")
@argument('app')
def tail_logs(app):
    """Tail an application log"""

    app = exit_if_invalid(app)

    logfiles = glob(join(LOG_ROOT, app, '*.log'))
    # Guard clause: nothing to tail.
    if not logfiles:
        echo("No logs found for app '%s'." % app, fg='yellow')
        return
    for line in multi_tail(app, logfiles):
        echo(line.strip(), fg='white')
|
2016-03-29 20:18:38 +00:00
|
|
|
|
|
|
|
|
2016-04-02 18:22:51 +00:00
|
|
|
@piku.command("ps")
@argument('app')
def deploy_app(app):
    """Show application worker count"""

    app = exit_if_invalid(app)

    config_file = join(ENV_ROOT, app, 'SCALING')
    if exists(config_file):
        # Context manager closes the handle deterministically.
        with open(config_file) as f:
            echo(f.read().strip(), fg='white')
    else:
        echo("Error: no workers found for app '%s'." % app, fg='red')
|
2016-04-02 16:38:53 +00:00
|
|
|
|
|
|
|
|
2016-04-02 18:22:51 +00:00
|
|
|
@piku.command("ps:scale")
@argument('app')
@argument('settings', nargs=-1)
def deploy_app(app, settings):
    """Scale application workers, e.g. web=2 worker=1"""
    # Docstring fixed: it previously said "Show application configuration",
    # copy-pasted from the config command.

    app = exit_if_invalid(app)

    config_file = join(ENV_ROOT, app, 'SCALING')
    worker_count = {k:int(v) for k, v in parse_procfile(config_file).items()}
    deltas = {}
    for s in settings:
        try:
            k, v = map(lambda x: x.strip(), s.split("=", 1))
            c = int(v)  # check for integer value
            if c < 0:
                echo("Error: cannot scale type '%s' below 0" % k, fg='red')
                return
            if k not in worker_count:
                echo("Error: worker type '%s' not present in '%s'" % (k, app), fg='red')
                return
            # Record the change relative to the current count.
            deltas[k] = c - worker_count[k]
        except ValueError:
            # Narrowed from a bare except: unpack failure (missing '=')
            # or a non-integer count are the malformed cases.
            echo("Error: malformed setting '%s'" % s, fg='red')
            return
    do_deploy(app, deltas)
|
2016-04-02 16:38:53 +00:00
|
|
|
|
|
|
|
|
2016-09-09 23:11:39 +00:00
|
|
|
@piku.command("run")
@argument('app')
@argument('cmd', nargs=-1)
def deploy_app(app, cmd):
    """Run a command inside the app, e.g.: ls -- -al"""

    app = exit_if_invalid(app)

    # Expose the app's live settings to the spawned command via the
    # process environment.
    config_file = join(ENV_ROOT, app, 'LIVE_ENV')
    environ.update(parse_settings(config_file))
    # Put our stdout/stderr into non-blocking mode so the child's output
    # is relayed promptly.
    for f in [stdout, stderr]:
        fl = fcntl(f, F_GETFL)
        fcntl(f, F_SETFL, fl | O_NONBLOCK)
    # NOTE(review): the command words are joined verbatim and run through
    # the shell with the app's directory as cwd -- arguments are not
    # escaped, which is acceptable here only because the caller is the
    # authenticated piku operator.
    p = Popen(' '.join(cmd), stdin=stdin, stdout=stdout, stderr=stderr, env=environ, cwd=join(APP_ROOT,app), shell=True)
    p.communicate()
|
2016-09-09 23:11:39 +00:00
|
|
|
|
2016-04-06 21:46:32 +00:00
|
|
|
@piku.command("restart")
@argument('app')
def restart_app(app):
    """Restart an application"""

    app = exit_if_invalid(app)

    enabled = glob(join(UWSGI_ENABLED, '%s*.ini' % app))

    # Guard clause: nothing enabled means the app was never deployed.
    if not enabled:
        echo("Error: app '%s' not deployed!" % app, fg='red')
        return
    echo("Restarting app '%s'..." % app, fg='yellow')
    # Removing the enabled configs stops the workers; redeploying
    # recreates them.
    for ini in enabled:
        remove(ini)
    do_deploy(app)
|
|
|
|
|
|
|
|
|
2016-04-03 21:36:04 +00:00
|
|
|
@piku.command("setup")
def init_paths():
    """Initialize environment"""

    echo("Running in Python %s" % ".".join(map(str,version_info)))

    # Create required paths
    for folder in [APP_ROOT, GIT_ROOT, ENV_ROOT, UWSGI_ROOT, UWSGI_AVAILABLE, UWSGI_ENABLED, LOG_ROOT, NGINX_ROOT]:
        if not exists(folder):
            echo("Creating '%s'." % folder, fg='green')
            makedirs(folder)

    # Set up the uWSGI emperor config, running as the current user/group.
    settings = [
        ('chdir', UWSGI_ROOT),
        ('emperor', UWSGI_ENABLED),
        ('log-maxsize', UWSGI_LOG_MAXSIZE),
        ('logto', join(UWSGI_ROOT, 'uwsgi.log')),
        ('log-backupname', join(UWSGI_ROOT, 'uwsgi.old.log')),
        ('socket', join(UWSGI_ROOT, 'uwsgi.sock')),
        ('uid', getpwuid(getuid()).pw_name),
        ('gid', getgrgid(getgid()).gr_name),
        ('enable-threads', 'true'),
        ('threads', '%d' % (cpu_count() * 2)),
    ]
    with open(join(UWSGI_ROOT,'uwsgi.ini'), 'w') as handle:
        handle.write('[uwsgi]\n')
        handle.writelines("%s = %s\n" % entry for entry in settings)

    # mark this script as executable (in case we were invoked via interpreter)
    this_script = realpath(__file__)
    if not(stat(this_script).st_mode & S_IXUSR):
        echo("Setting '%s' as executable." % this_script, fg='yellow')
        chmod(this_script, stat(this_script).st_mode | S_IXUSR)
|
2016-04-03 21:36:04 +00:00
|
|
|
|
|
|
|
|
2016-04-03 16:14:15 +00:00
|
|
|
@piku.command("setup:ssh")
@argument('public_key_file')
def add_key(public_key_file):
    """Set up a new SSH key (use - for stdin)"""

    def add_helper(key_file):
        # Register the key in authorized_keys, keyed by its fingerprint.
        if exists(key_file):
            try:
                # NOTE(review): key_file is interpolated into a shell
                # command unescaped; acceptable only because the caller
                # is the piku operator, but worth hardening.
                fingerprint = str(check_output('ssh-keygen -lf ' + key_file, shell=True)).split(' ', 4)[1]
                # Context manager closes the key file deterministically.
                with open(key_file, 'r') as f:
                    key = f.read().strip()
                echo("Adding key '%s'." % fingerprint, fg='white')
                setup_authorized_keys(fingerprint, realpath(__file__), key)
            except Exception:
                # Dropped the unused "as e" binding; format_exc() already
                # carries the full traceback.
                echo("Error: invalid public key file '%s': %s" % (key_file, format_exc()), fg='red')
        elif '-' == public_key_file:
            # Read the key from stdin into a temporary file, then retry.
            buffer = "".join(stdin.readlines())
            with NamedTemporaryFile(mode="w") as f:
                f.write(buffer)
                f.flush()
                add_helper(f.name)
        else:
            echo("Error: public key file '%s' not found." % key_file, fg='red')

    add_helper(public_key_file)
|
2016-04-03 16:14:15 +00:00
|
|
|
|
2016-04-06 21:46:32 +00:00
|
|
|
|
|
|
|
@piku.command("stop")
@argument('app')
def stop_app(app):
    """Stop an application"""

    app = exit_if_invalid(app)
    enabled = glob(join(UWSGI_ENABLED, '%s*.ini' % app))

    # Guard clause: nothing enabled means the app was never deployed.
    if not enabled:
        echo("Error: app '%s' not deployed!" % app, fg='red')
        return
    echo("Stopping app '%s'..." % app, fg='yellow')
    # The uWSGI emperor stops workers whose ini files disappear.
    for ini in enabled:
        remove(ini)
|
|
|
|
|
|
|
|
|
2016-03-29 20:18:38 +00:00
|
|
|
# --- Internal commands ---
|
2016-03-26 22:47:39 +00:00
|
|
|
|
2016-03-26 17:28:01 +00:00
|
|
|
@piku.command("git-hook")
@argument('app')
def git_hook(app):
    """INTERNAL: Post-receive git hook"""

    app = sanitize_app_name(app)
    repo_path = join(GIT_ROOT, app)
    app_path = join(APP_ROOT, app)

    # git feeds one "<oldrev> <newrev> <refname>" line per updated ref on stdin.
    for line in stdin:
        oldrev, newrev, refname = line.strip().split(" ")
        #echo("refs:", oldrev, newrev, refname)
        if refname == "refs/heads/master":
            # Handle pushes to master branch
            if not exists(app_path):
                # First push: create the working tree by cloning the bare repo.
                echo("-----> Creating app '%s'" % app, fg='green')
                makedirs(app_path)
                call('git clone --quiet %s %s' % (repo_path, app), cwd=APP_ROOT, shell=True)
            do_deploy(app)
        else:
            # TODO: Handle pushes to another branch
            echo("receive-branch '%s': %s, %s" % (app, newrev, refname))
|
2016-03-29 20:18:38 +00:00
|
|
|
|
|
|
|
|
|
|
|
@piku.command("git-receive-pack")
@argument('app')
def receive(app):
    """INTERNAL: Handle git pushes for an app"""

    app = sanitize_app_name(app)
    hook_path = join(GIT_ROOT, app, 'hooks', 'post-receive')

    if not exists(hook_path):
        makedirs(dirname(hook_path))
        # Initialize the repository with a hook to this script
        call("git init --quiet --bare " + app, cwd=GIT_ROOT, shell=True)
        # The post-receive hook pipes git's ref updates back into this
        # script's git-hook command, preserving PIKU_ROOT.
        with open(hook_path, 'w') as h:
            h.write("""#!/usr/bin/env bash
set -e; set -o pipefail;
cat | PIKU_ROOT="%s" %s git-hook %s""" % (PIKU_ROOT, realpath(__file__), app))
        # Make the hook executable by our user
        chmod(hook_path, stat(hook_path).st_mode | S_IXUSR)
    # Handle the actual receive. We'll be called with 'git-hook' after it happens
    call('git-shell -c "%s" ' % (argv[1] + " '%s'" % app), cwd=GIT_ROOT, shell=True)
|
2017-05-15 20:37:14 +00:00
|
|
|
|
|
|
|
|
2016-03-26 12:52:54 +00:00
|
|
|
# Script entry point: dispatch to the click command group.
if __name__ == '__main__':
    piku()
|