#!/usr/bin/env python3

"Piku Micro-PaaS"

try:
    from sys import version_info
    assert version_info >= (3, 5)
except AssertionError:
    exit("Piku requires Python 3.5 or above")

from importlib import import_module
from collections import defaultdict, deque
from fcntl import fcntl, F_SETFL, F_GETFL
from glob import glob
from json import loads
from multiprocessing import cpu_count
from os import chmod, getgid, getuid, symlink, unlink, remove, stat, listdir, environ, makedirs, O_NONBLOCK
from os.path import abspath, basename, dirname, exists, getmtime, join, realpath, splitext, isdir
from pwd import getpwuid
from grp import getgrgid
from re import sub, match
from shutil import copyfile, rmtree, which
from socket import socket, AF_INET, SOCK_STREAM
from stat import S_IRUSR, S_IWUSR, S_IXUSR
from subprocess import call, check_output, Popen, STDOUT
from sys import argv, stdin, stdout, stderr, version_info, exit, path as sys_path
from tempfile import NamedTemporaryFile
from time import sleep
from traceback import format_exc
from urllib.request import urlopen

from click import argument, group, secho as echo, pass_context, CommandCollection

# === Make sure we can access all system binaries ===

if 'sbin' not in environ['PATH']:
    environ['PATH'] = "/usr/local/sbin:/usr/sbin:/sbin:" + environ['PATH']

# === Globals - all tweakable settings are here ===

PIKU_RAW_SOURCE_URL = "https://raw.githubusercontent.com/piku/piku/master/piku.py"
PIKU_ROOT = environ.get('PIKU_ROOT', join(environ['HOME'], '.piku'))
PIKU_BIN = join(environ['HOME'], 'bin')
PIKU_SCRIPT = realpath(__file__)
PIKU_PLUGIN_ROOT = abspath(join(PIKU_ROOT, "plugins"))
APP_ROOT = abspath(join(PIKU_ROOT, "apps"))
ENV_ROOT = abspath(join(PIKU_ROOT, "envs"))
GIT_ROOT = abspath(join(PIKU_ROOT, "repos"))
LOG_ROOT = abspath(join(PIKU_ROOT, "logs"))
NGINX_ROOT = abspath(join(PIKU_ROOT, "nginx"))
UWSGI_AVAILABLE = abspath(join(PIKU_ROOT, "uwsgi-available"))
UWSGI_ENABLED = abspath(join(PIKU_ROOT, "uwsgi-enabled"))
UWSGI_ROOT = abspath(join(PIKU_ROOT, "uwsgi"))
UWSGI_LOG_MAXSIZE = '1048576'
ACME_ROOT = environ.get('ACME_ROOT', join(environ['HOME'], '.acme.sh'))
ACME_WWW = abspath(join(PIKU_ROOT, "acme"))
ACME_ROOT_CA = environ.get('ACME_ROOT_CA', 'letsencrypt.org')

# === Make sure we can access piku user-installed binaries === #

if PIKU_BIN not in environ['PATH']:
    environ['PATH'] = PIKU_BIN + ":" + environ['PATH']
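# Resulting directory layout (illustrative, derived from the defaults above): everything piku
# manages lives under $PIKU_ROOT, e.g.
#   ~/.piku/{apps,repos,envs,logs,nginx,uwsgi,uwsgi-available,uwsgi-enabled,acme,plugins}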

# pylint: disable=anomalous-backslash-in-string

NGINX_TEMPLATE = """
upstream $APP {
  server $NGINX_SOCKET;
}
server {
  listen $NGINX_IPV6_ADDRESS:80;
  listen $NGINX_IPV4_ADDRESS:80;

  location ^~ /.well-known/acme-challenge {
    allow all;
    root ${ACME_WWW};
  }

$PIKU_INTERNAL_NGINX_COMMON
}
"""

NGINX_HTTPS_ONLY_TEMPLATE = """
upstream $APP {
  server $NGINX_SOCKET;
}
server {
  listen $NGINX_IPV6_ADDRESS:80;
  listen $NGINX_IPV4_ADDRESS:80;
  server_name $NGINX_SERVER_NAME;

  location ^~ /.well-known/acme-challenge {
    allow all;
    root ${ACME_WWW};
  }

  location / {
    return 301 https://$server_name$request_uri;
  }
}

server {
$PIKU_INTERNAL_NGINX_COMMON
}
"""
# pylint: enable=anomalous-backslash-in-string

NGINX_COMMON_FRAGMENT = """
  listen $NGINX_IPV6_ADDRESS:$NGINX_SSL;
  listen $NGINX_IPV4_ADDRESS:$NGINX_SSL;
  ssl_certificate $NGINX_ROOT/$APP.crt;
  ssl_certificate_key $NGINX_ROOT/$APP.key;
  server_name $NGINX_SERVER_NAME;

  # These are not required under systemd - enable for debugging only
  # access_log $LOG_ROOT/$APP/access.log;
  # error_log $LOG_ROOT/$APP/error.log;

  # Enable gzip compression
  gzip on;
  gzip_proxied any;
  gzip_types text/plain text/xml text/css text/javascript text/js application/x-javascript application/javascript application/json application/xml+rss application/atom+xml image/svg+xml;
  gzip_comp_level 7;
  gzip_min_length 2048;
  gzip_vary on;
  gzip_disable "MSIE [1-6]\.(?!.*SV1)";

  # set a custom header for requests
  add_header X-Deployed-By Piku;

  $PIKU_INTERNAL_NGINX_CUSTOM_CLAUSES

  $PIKU_INTERNAL_NGINX_STATIC_MAPPINGS

  $PIKU_INTERNAL_NGINX_BLOCK_GIT

  $PIKU_INTERNAL_NGINX_PORTMAP
"""

NGINX_PORTMAP_FRAGMENT = """
  location / {
    $PIKU_INTERNAL_NGINX_UWSGI_SETTINGS
    proxy_http_version 1.1;
    proxy_set_header Upgrade $http_upgrade;
    proxy_set_header Connection "upgrade";
    proxy_set_header Host $host;
    proxy_set_header X-Forwarded-Proto $scheme;
    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    proxy_set_header X-Remote-Address $remote_addr;
    proxy_set_header X-Forwarded-Port $server_port;
    proxy_set_header X-Request-Start $msec;
    $NGINX_ACL
  }
"""

NGINX_ACME_FIRSTRUN_TEMPLATE = """
server {
  listen $NGINX_IPV6_ADDRESS:80;
  listen $NGINX_IPV4_ADDRESS:80;
  server_name $NGINX_SERVER_NAME;
  location ^~ /.well-known/acme-challenge {
    allow all;
    root ${ACME_WWW};
  }
}
"""

PIKU_INTERNAL_NGINX_STATIC_MAPPING = """
  location $static_url {
      sendfile on;
      sendfile_max_chunk 1m;
      tcp_nopush on;
      directio 8m;
      aio threads;
      alias $static_path;
      try_files $uri $uri.html $uri/ =404;
  }
"""

PIKU_INTERNAL_NGINX_UWSGI_SETTINGS = """
    uwsgi_pass $APP;
    uwsgi_param QUERY_STRING $query_string;
    uwsgi_param REQUEST_METHOD $request_method;
    uwsgi_param CONTENT_TYPE $content_type;
    uwsgi_param CONTENT_LENGTH $content_length;
    uwsgi_param REQUEST_URI $request_uri;
    uwsgi_param PATH_INFO $document_uri;
    uwsgi_param DOCUMENT_ROOT $document_root;
    uwsgi_param SERVER_PROTOCOL $server_protocol;
    uwsgi_param REMOTE_ADDR $remote_addr;
    uwsgi_param REMOTE_PORT $remote_port;
    uwsgi_param SERVER_ADDR $server_addr;
    uwsgi_param SERVER_PORT $server_port;
    uwsgi_param SERVER_NAME $server_name;
"""

CRON_REGEXP = "^((?:(?:\*\/)?\d+)|\*) ((?:(?:\*\/)?\d+)|\*) ((?:(?:\*\/)?\d+)|\*) ((?:(?:\*\/)?\d+)|\*) ((?:(?:\*\/)?\d+)|\*) (.*)$"
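# Example (illustrative) of a Procfile entry that CRON_REGEXP is meant to match:
#   cron: */5 * * * * python cleanup.py
# i.e. five schedule fields (minute hour day month weekday, each a number, */n or *) followed by the command.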

# === Utility functions ===


def sanitize_app_name(app):
    """Sanitize the app name and build matching path"""

    app = "".join(c for c in app if c.isalnum() or c in ('.', '_', '-')).rstrip().lstrip('/')
    return app


def exit_if_invalid(app):
    """Utility function for error checking upon command startup."""

    app = sanitize_app_name(app)
    if not exists(join(APP_ROOT, app)):
        echo("Error: app '{}' not found.".format(app), fg='red')
        exit(1)
    return app


def get_free_port(address=""):
    """Find a free TCP port (entirely at random)"""

    s = socket(AF_INET, SOCK_STREAM)
    s.bind((address, 0))  # lgtm [py/bind-socket-all-network-interfaces]
    port = s.getsockname()[1]
    s.close()
    return port

def write_config(filename, bag, separator='='):
    """Helper for writing out config files"""

    with open(filename, 'w') as h:
        # pylint: disable=unused-variable
        for k, v in bag.items():
            h.write('{k:s}{separator:s}{v}\n'.format(**locals()))


def setup_authorized_keys(ssh_fingerprint, script_path, pubkey):
    """Sets up an authorized_keys file to redirect SSH commands"""

    authorized_keys = join(environ['HOME'], '.ssh', 'authorized_keys')
    if not exists(dirname(authorized_keys)):
        makedirs(dirname(authorized_keys))
    # Restrict features and force all SSH commands to go through our script
    with open(authorized_keys, 'a') as h:
        h.write("""command="FINGERPRINT={ssh_fingerprint:s} NAME=default {script_path:s} $SSH_ORIGINAL_COMMAND",no-agent-forwarding,no-user-rc,no-X11-forwarding,no-port-forwarding {pubkey:s}\n""".format(**locals()))
    chmod(dirname(authorized_keys), S_IRUSR | S_IWUSR | S_IXUSR)
    chmod(authorized_keys, S_IRUSR | S_IWUSR)

def parse_procfile(filename):
    """Parses a Procfile and returns the worker types. Only one worker of each type is allowed."""

    workers = {}
    if not exists(filename):
        return None
    with open(filename, 'r') as procfile:
        for line_number, line in enumerate(procfile):
            line = line.strip()
            if line.startswith("#") or not line:
                continue
            try:
                kind, command = map(lambda x: x.strip(), line.split(":", 1))
                # Check for cron patterns
                if kind == "cron":
                    limits = [59, 24, 31, 12, 7]
                    matches = match(CRON_REGEXP, command).groups()
                    if matches:
                        for i in range(len(limits)):
                            if int(matches[i].replace("*/", "").replace("*", "1")) > limits[i]:
                                raise ValueError
                workers[kind] = command
            except Exception:
                echo("Warning: misformatted Procfile entry '{}' at line {}".format(line, line_number), fg='yellow')
    if len(workers) == 0:
        return {}
    # WSGI trumps regular web workers
    if 'wsgi' in workers or 'jwsgi' in workers or 'rwsgi' in workers:
        if 'web' in workers:
            echo("Warning: found both 'wsgi' and 'web' workers, disabling 'web'", fg='yellow')
            del workers['web']
    return workers
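# An illustrative Procfile that parse_procfile() would accept (assumed example, not shipped with piku):
#   wsgi: module:application         -> served by uWSGI directly; takes precedence over a 'web' entry
#   web: node server.js              -> plain HTTP worker, reverse-proxied by nginx
#   worker: python tasks.py          -> any other kind ends up attached as a background daemon
#   cron: 0 4 * * * python backup.py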

def expandvars(buffer, env, default=None, skip_escaped=False):
    """expand shell-style environment variables in a buffer"""

    def replace_var(match):
        return env.get(match.group(2) or match.group(1), match.group(0) if default is None else default)

    pattern = (r'(?<!\\)' if skip_escaped else '') + r'\$(\w+|\{([^}]*)\})'
    return sub(pattern, replace_var, buffer)
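# Example (illustrative): expandvars("http://$BIND_ADDRESS:${PORT}", {'BIND_ADDRESS': '127.0.0.1', 'PORT': '8000'})
# returns 'http://127.0.0.1:8000'; unknown variables are left untouched unless `default` is given.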

def command_output(cmd):
    """executes a command and grabs its output, if any"""

    try:
        env = environ
        return str(check_output(cmd, stderr=STDOUT, env=env, shell=True))
    except Exception:
        return ""

def parse_settings(filename, env={}):
    """Parses a settings file and returns a dict with environment variables"""

    if not exists(filename):
        return {}

    with open(filename, 'r') as settings:
        for line in settings:
            if line[0] == '#' or len(line.strip()) == 0:  # ignore comments and newlines
                continue
            try:
                k, v = map(lambda x: x.strip(), line.split("=", 1))
                env[k] = expandvars(v, env)
            except Exception:
                echo("Error: malformed setting '{}', ignoring file.".format(line), fg='red')
                return {}
    return env
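# An illustrative ENV file that parse_settings() would accept (assumed example):
#   NGINX_SERVER_NAME=example.com
#   PORT=8000
#   NGINX_STATIC_PATHS=/assets:static/
# Values may reference previously defined keys, e.g. BASE_URL=https://$NGINX_SERVER_NAME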

def check_requirements(binaries):
    """Checks if all the binaries exist and are executable"""

    echo("-----> Checking requirements: {}".format(binaries), fg='green')
    requirements = list(map(which, binaries))
    echo(str(requirements))
    if None in requirements:
        return False
    return True


def found_app(kind):
    """Helper function to output app detected"""

    echo("-----> {} app detected.".format(kind), fg='green')
    return True

def do_deploy(app, deltas={}, newrev=None):
    """Deploy an app by resetting the work directory"""

    app_path = join(APP_ROOT, app)
    procfile = join(app_path, 'Procfile')
    log_path = join(LOG_ROOT, app)

    env = {'GIT_WORK_DIR': app_path}
    if exists(app_path):
        echo("-----> Deploying app '{}'".format(app), fg='green')
        call('git fetch --quiet', cwd=app_path, env=env, shell=True)
        if newrev:
            call('git reset --hard {}'.format(newrev), cwd=app_path, env=env, shell=True)
        call('git submodule init', cwd=app_path, env=env, shell=True)
        call('git submodule update', cwd=app_path, env=env, shell=True)
        if not exists(log_path):
            makedirs(log_path)
        workers = parse_procfile(procfile)
        if workers and len(workers) > 0:
            settings = {}
            if exists(join(app_path, 'requirements.txt')) and found_app("Python"):
                settings.update(deploy_python(app, deltas))
            elif exists(join(app_path, 'Gemfile')) and found_app("Ruby Application") and check_requirements(['ruby', 'gem', 'bundle']):
                settings.update(deploy_ruby(app, deltas))
            elif exists(join(app_path, 'package.json')) and found_app("Node") and (
                    check_requirements(['nodejs', 'npm']) or check_requirements(['node', 'npm']) or check_requirements(['nodeenv'])):
                settings.update(deploy_node(app, deltas))
            elif exists(join(app_path, 'pom.xml')) and found_app("Java Maven") and check_requirements(['java', 'mvn']):
                settings.update(deploy_java_maven(app, deltas))
            elif exists(join(app_path, 'build.gradle')) and found_app("Java Gradle") and check_requirements(['java', 'gradle']):
                settings.update(deploy_java_gradle(app, deltas))
            elif (exists(join(app_path, 'Godeps')) or len(glob(join(app_path, '*.go')))) and found_app("Go") and check_requirements(['go']):
                settings.update(deploy_go(app, deltas))
            elif exists(join(app_path, 'project.clj')) and found_app("Clojure Lein") and check_requirements(['java', 'lein']):
                settings.update(deploy_clojure(app, deltas))
            elif 'release' in workers and 'web' in workers:
                echo("-----> Generic app detected.", fg='green')
                settings.update(deploy_identity(app, deltas))
            elif 'static' in workers:
                echo("-----> Static app detected.", fg='green')
                settings.update(deploy_identity(app, deltas))
            else:
                echo("-----> Could not detect runtime!", fg='red')
            # TODO: detect other runtimes
            if "release" in workers:
                echo("-----> Releasing", fg='green')
                retval = call(workers["release"], cwd=app_path, env=settings, shell=True)
                if retval:
                    echo("-----> Exiting due to release command error value: {}".format(retval))
                    exit(retval)
                workers.pop("release", None)
        else:
            echo("Error: Invalid Procfile for app '{}'.".format(app), fg='red')
    else:
        echo("Error: app '{}' not found.".format(app), fg='red')

def deploy_java_gradle(app, deltas={}):
    """Deploy a Java application using Gradle"""

    java_path = join(ENV_ROOT, app)
    build_path = join(APP_ROOT, app, 'build')
    env_file = join(APP_ROOT, app, 'ENV')

    env = {
        'VIRTUAL_ENV': java_path,
        "PATH": ':'.join([join(java_path, "bin"), join(app, ".bin"), environ['PATH']])
    }

    if exists(env_file):
        env.update(parse_settings(env_file, env))

    if not exists(java_path):
        makedirs(java_path)

    if not exists(build_path):
        echo("-----> Building Java Application")
        call('gradle build', cwd=join(APP_ROOT, app), env=env, shell=True)
    else:
        echo("-----> Removing previous builds")
        echo("-----> Rebuilding Java Application")
        call('gradle clean build', cwd=join(APP_ROOT, app), env=env, shell=True)

    return spawn_app(app, deltas)


def deploy_java_maven(app, deltas={}):
    """Deploy a Java application using Maven"""

    # TODO: Use jenv to isolate Java Application environments
    java_path = join(ENV_ROOT, app)
    target_path = join(APP_ROOT, app, 'target')
    env_file = join(APP_ROOT, app, 'ENV')

    env = {
        'VIRTUAL_ENV': java_path,
        "PATH": ':'.join([join(java_path, "bin"), join(app, ".bin"), environ['PATH']])
    }

    if exists(env_file):
        env.update(parse_settings(env_file, env))

    if not exists(java_path):
        makedirs(java_path)

    if not exists(target_path):
        echo("-----> Building Java Application")
        call('mvn package', cwd=join(APP_ROOT, app), env=env, shell=True)
    else:
        echo("-----> Removing previous builds")
        echo("-----> Rebuilding Java Application")
        call('mvn clean package', cwd=join(APP_ROOT, app), env=env, shell=True)

    return spawn_app(app, deltas)

def deploy_clojure(app, deltas={}):
    """Deploy a Clojure Application"""

    virtual = join(ENV_ROOT, app)
    target_path = join(APP_ROOT, app, 'target')
    env_file = join(APP_ROOT, app, 'ENV')
    if not exists(target_path):
        makedirs(virtual)
    env = {
        'VIRTUAL_ENV': virtual,
        "PATH": ':'.join([join(virtual, "bin"), join(app, ".bin"), environ['PATH']]),
        "LEIN_HOME": environ.get('LEIN_HOME', join(environ['HOME'], '.lein')),
    }
    if exists(env_file):
        env.update(parse_settings(env_file, env))
    echo("-----> Building Clojure Application")
    call('lein clean', cwd=join(APP_ROOT, app), env=env, shell=True)
    call('lein uberjar', cwd=join(APP_ROOT, app), env=env, shell=True)
    return spawn_app(app, deltas)


def deploy_ruby(app, deltas={}):
    """Deploy a Ruby Application"""

    virtual = join(ENV_ROOT, app)
    env_file = join(APP_ROOT, app, 'ENV')
    env = {
        'VIRTUAL_ENV': virtual,
        "PATH": ':'.join([join(virtual, "bin"), join(app, ".bin"), environ['PATH']]),
    }
    if exists(env_file):
        env.update(parse_settings(env_file, env))

    if not exists(virtual):
        echo("-----> Building Ruby Application")
        makedirs(virtual)
    else:
        echo("------> Rebuilding Ruby Application")

    call('bundle install', cwd=join(APP_ROOT, app), env=env, shell=True)

    return spawn_app(app, deltas)

def deploy_go(app, deltas={}):
    """Deploy a Go application"""

    go_path = join(ENV_ROOT, app)
    deps = join(APP_ROOT, app, 'Godeps')

    first_time = False
    if not exists(go_path):
        echo("-----> Creating GOPATH for '{}'".format(app), fg='green')
        makedirs(go_path)
        # copy across a pre-built GOPATH to save provisioning time
        call('cp -a $HOME/gopath {}'.format(app), cwd=ENV_ROOT, shell=True)
        first_time = True

    if exists(deps):
        if first_time or getmtime(deps) > getmtime(go_path):
            echo("-----> Running godep for '{}'".format(app), fg='green')
            env = {
                'GOPATH': '$HOME/gopath',
                'GOROOT': '$HOME/go',
                'PATH': '$PATH:$HOME/go/bin',
                'GO15VENDOREXPERIMENT': '1'
            }
            call('godep update ...', cwd=join(APP_ROOT, app), env=env, shell=True)
    return spawn_app(app, deltas)


def deploy_node(app, deltas={}):
    """Deploy a Node application"""

    virtualenv_path = join(ENV_ROOT, app)
    node_path = join(ENV_ROOT, app, "node_modules")
    node_modules_symlink = join(APP_ROOT, app, "node_modules")
    npm_prefix = abspath(join(node_path, ".."))
    env_file = join(APP_ROOT, app, 'ENV')
    deps = join(APP_ROOT, app, 'package.json')

    first_time = False
    if not exists(node_path):
        echo("-----> Creating node_modules for '{}'".format(app), fg='green')
        makedirs(node_path)
        first_time = True

    env = {
        'VIRTUAL_ENV': virtualenv_path,
        'NODE_PATH': node_path,
        'NPM_CONFIG_PREFIX': npm_prefix,
        "PATH": ':'.join([join(virtualenv_path, "bin"), join(node_path, ".bin"), environ['PATH']])
    }
    if exists(env_file):
        env.update(parse_settings(env_file, env))

    # include node binaries on our path
    environ["PATH"] = env["PATH"]

    version = env.get("NODE_VERSION")
    node_binary = join(virtualenv_path, "bin", "node")
    installed = check_output("{} -v".format(node_binary), cwd=join(APP_ROOT, app), env=env, shell=True).decode("utf8").rstrip("\n") if exists(node_binary) else ""

    if version and check_requirements(['nodeenv']):
        if not installed.endswith(version):
            started = glob(join(UWSGI_ENABLED, '{}*.ini'.format(app)))
            if installed and len(started):
                echo("Warning: Can't update node with app running. Stop the app & retry.", fg='yellow')
            else:
                echo("-----> Installing node version '{NODE_VERSION:s}' using nodeenv".format(**env), fg='green')
                call("nodeenv --prebuilt --node={NODE_VERSION:s} --clean-src --force {VIRTUAL_ENV:s}".format(**env), cwd=virtualenv_path, env=env, shell=True)
        else:
            echo("-----> Node is installed at {}.".format(version))

    if exists(deps) and check_requirements(['npm']):
        if first_time or getmtime(deps) > getmtime(node_path):
            copyfile(join(APP_ROOT, app, 'package.json'), join(ENV_ROOT, app, 'package.json'))
            if not exists(node_modules_symlink):
                symlink(node_path, node_modules_symlink)
            echo("-----> Running npm for '{}'".format(app), fg='green')
            call('npm install --prefix {} --package-lock=false'.format(npm_prefix), cwd=join(APP_ROOT, app), env=env, shell=True)
    return spawn_app(app, deltas)

def deploy_python(app, deltas={}):
    """Deploy a Python application"""

    virtualenv_path = join(ENV_ROOT, app)
    requirements = join(APP_ROOT, app, 'requirements.txt')
    env_file = join(APP_ROOT, app, 'ENV')
    # Peek at environment variables shipped with repo (if any) to determine version
    env = {}
    if exists(env_file):
        env.update(parse_settings(env_file, env))

    # TODO: improve version parsing
    # pylint: disable=unused-variable
    version = int(env.get("PYTHON_VERSION", "3"))

    first_time = False
    if not exists(join(virtualenv_path, "bin", "activate")):
        echo("-----> Creating virtualenv for '{}'".format(app), fg='green')
        try:
            makedirs(virtualenv_path)
        except FileExistsError:
            echo("-----> Env dir already exists: '{}'".format(app), fg='yellow')
        call('virtualenv --python=python{version:d} {app:s}'.format(**locals()), cwd=ENV_ROOT, shell=True)
        first_time = True

    activation_script = join(virtualenv_path, 'bin', 'activate_this.py')
    exec(open(activation_script).read(), dict(__file__=activation_script))

    if first_time or getmtime(requirements) > getmtime(virtualenv_path):
        echo("-----> Running pip for '{}'".format(app), fg='green')
        call('pip install -r {}'.format(requirements), cwd=virtualenv_path, shell=True)

    return spawn_app(app, deltas)


def deploy_identity(app, deltas={}):
    env_path = join(ENV_ROOT, app)
    if not exists(env_path):
        makedirs(env_path)
    return spawn_app(app, deltas)

def spawn_app(app, deltas={}):
    """Create all workers for an app"""

    # pylint: disable=unused-variable
    app_path = join(APP_ROOT, app)
    procfile = join(app_path, 'Procfile')
    workers = parse_procfile(procfile)
    workers.pop("release", None)
    ordinals = defaultdict(lambda: 1)
    worker_count = {k: 1 for k in workers.keys()}

    # the Python virtualenv
    virtualenv_path = join(ENV_ROOT, app)
    # Settings shipped with the app
    env_file = join(APP_ROOT, app, 'ENV')
    # Custom overrides
    settings = join(ENV_ROOT, app, 'ENV')
    # Live settings
    live = join(ENV_ROOT, app, 'LIVE_ENV')
    # Scaling
    scaling = join(ENV_ROOT, app, 'SCALING')

    # Bootstrap environment
    env = {
        'APP': app,
        'LOG_ROOT': LOG_ROOT,
        'HOME': environ['HOME'],
        'USER': environ['USER'],
        'PATH': ':'.join([join(virtualenv_path, 'bin'), environ['PATH']]),
        'PWD': dirname(env_file),
        'VIRTUAL_ENV': virtualenv_path,
    }

    safe_defaults = {
        'NGINX_IPV4_ADDRESS': '0.0.0.0',
        'NGINX_IPV6_ADDRESS': '[::]',
        'BIND_ADDRESS': '127.0.0.1',
    }

    # add node path if present
    node_path = join(virtualenv_path, "node_modules")
    if exists(node_path):
        env["NODE_PATH"] = node_path
        env["PATH"] = ':'.join([join(node_path, ".bin"), env['PATH']])

    # Load environment variables shipped with repo (if any)
    if exists(env_file):
        env.update(parse_settings(env_file, env))

    # Override with custom settings (if any)
    if exists(settings):
        env.update(parse_settings(settings, env))  # lgtm [py/modification-of-default-value]
    if 'web' in workers or 'wsgi' in workers or 'jwsgi' in workers or 'static' in workers or 'rwsgi' in workers:
        # Pick a port if none defined
        if 'PORT' not in env:
            env['PORT'] = str(get_free_port())
            echo("-----> picking free port {PORT}".format(**env))

        # Safe defaults for addressing
        for k, v in safe_defaults.items():
            if k not in env:
                echo("-----> nginx {k:s} set to {v}".format(**locals()))
                env[k] = v

        # Set up nginx if we have NGINX_SERVER_NAME set
        if 'NGINX_SERVER_NAME' in env:
            nginx = command_output("nginx -V")
            nginx_ssl = "443 ssl"
            if "--with-http_v2_module" in nginx:
                nginx_ssl += " http2"
            elif "--with-http_spdy_module" in nginx and "nginx/1.6.2" not in nginx:  # avoid Raspbian bug
                nginx_ssl += " spdy"
            nginx_conf = join(NGINX_ROOT, "{}.conf".format(app))

            env.update({  # lgtm [py/modification-of-default-value]
                'NGINX_SSL': nginx_ssl,
                'NGINX_ROOT': NGINX_ROOT,
                'ACME_WWW': ACME_WWW,
            })

            # default to reverse proxying to the TCP port we picked
            env['PIKU_INTERNAL_NGINX_UWSGI_SETTINGS'] = 'proxy_pass http://{BIND_ADDRESS:s}:{PORT:s};'.format(**env)
            if 'wsgi' in workers or 'jwsgi' in workers:
                sock = join(NGINX_ROOT, "{}.sock".format(app))
                env['PIKU_INTERNAL_NGINX_UWSGI_SETTINGS'] = expandvars(PIKU_INTERNAL_NGINX_UWSGI_SETTINGS, env)
                env['NGINX_SOCKET'] = env['BIND_ADDRESS'] = "unix://" + sock
                if 'PORT' in env:
                    del env['PORT']
            else:
                env['NGINX_SOCKET'] = "{BIND_ADDRESS:s}:{PORT:s}".format(**env)
                echo("-----> nginx will look for app '{}' on {}".format(app, env['NGINX_SOCKET']))

            domains = env['NGINX_SERVER_NAME'].split()
            domain = domains[0]
            issuefile = join(ACME_ROOT, domain, "issued-" + "-".join(domains))
            key, crt = [join(NGINX_ROOT, "{}.{}".format(app, x)) for x in ['key', 'crt']]
            if exists(join(ACME_ROOT, "acme.sh")):
                acme = ACME_ROOT
                www = ACME_WWW
                root_ca = ACME_ROOT_CA
                # if this is the first run there will be no nginx conf yet
                # create a basic conf stub just to serve the acme auth
                if not exists(nginx_conf):
                    echo("-----> writing temporary nginx conf")
                    buffer = expandvars(NGINX_ACME_FIRSTRUN_TEMPLATE, env)
                    with open(nginx_conf, "w") as h:
                        h.write(buffer)
                if not exists(key) or not exists(issuefile):
                    echo("-----> getting letsencrypt certificate")
                    certlist = " ".join(["-d {}".format(d) for d in domains])
                    call('{acme:s}/acme.sh --issue {certlist:s} -w {www:s} --server {root_ca:s}'.format(**locals()), shell=True)
                    call('{acme:s}/acme.sh --install-cert {certlist:s} --key-file {key:s} --fullchain-file {crt:s}'.format(**locals()), shell=True)
                    if exists(join(ACME_ROOT, domain)) and not exists(join(ACME_WWW, app)):
                        symlink(join(ACME_ROOT, domain), join(ACME_WWW, app))
                    try:
                        symlink("/dev/null", issuefile)
                    except Exception:
                        pass
                else:
                    echo("-----> letsencrypt certificate already installed")

            # fall back to creating self-signed certificate if acme failed
            if not exists(key) or stat(crt).st_size == 0:
                echo("-----> generating self-signed certificate")
                call('openssl req -new -newkey rsa:4096 -days 365 -nodes -x509 -subj "/C=US/ST=NY/L=New York/O=Piku/OU=Self-Signed/CN={domain:s}" -keyout {key:s} -out {crt:s}'.format(**locals()), shell=True)
            # restrict access to server from CloudFlare IP addresses
            acl = []
            if env.get('NGINX_CLOUDFLARE_ACL', 'false').lower() == 'true':
                try:
                    cf = loads(urlopen('https://api.cloudflare.com/client/v4/ips').read().decode("utf-8"))
                    if cf['success'] is True:
                        for i in cf['result']['ipv4_cidrs']:
                            acl.append("allow {};".format(i))
                        for i in cf['result']['ipv6_cidrs']:
                            acl.append("allow {};".format(i))
                        # allow access from controlling machine
                        if 'SSH_CLIENT' in environ:
                            remote_ip = environ['SSH_CLIENT'].split()[0]
                            echo("-----> Adding your IP ({}) to nginx ACL".format(remote_ip))
                            acl.append("allow {};".format(remote_ip))
                        acl.extend(["allow 127.0.0.1;", "deny all;"])
                except Exception:
                    cf = defaultdict()
                    echo("-----> Could not retrieve CloudFlare IP ranges: {}".format(format_exc()), fg="red")

            env['NGINX_ACL'] = " ".join(acl)

            env['PIKU_INTERNAL_NGINX_BLOCK_GIT'] = "" if env.get('NGINX_ALLOW_GIT_FOLDERS') else "location ~ /\.git { deny all; }"

            env['PIKU_INTERNAL_NGINX_STATIC_MAPPINGS'] = ''

            # Get a mapping of /url:path1,/url2:path2
            static_paths = env.get('NGINX_STATIC_PATHS', '')
            # prepend static worker path if present
            if 'static' in workers:
                stripped = workers['static'].strip("/").rstrip("/")
                static_paths = "/:" + (stripped if stripped else ".") + "/" + ("," if static_paths else "") + static_paths
            if len(static_paths):
                try:
                    items = static_paths.split(',')
                    for item in items:
                        static_url, static_path = item.split(':')
                        if static_path[0] != '/':
                            static_path = join(app_path, static_path)
                        env['PIKU_INTERNAL_NGINX_STATIC_MAPPINGS'] = env['PIKU_INTERNAL_NGINX_STATIC_MAPPINGS'] + expandvars(PIKU_INTERNAL_NGINX_STATIC_MAPPING, locals())
                except Exception as e:
                    echo("Error {} in static path spec: should be /url1:path1[,/url2:path2], ignoring.".format(e))
                    env['PIKU_INTERNAL_NGINX_STATIC_MAPPINGS'] = ''

            env['PIKU_INTERNAL_NGINX_CUSTOM_CLAUSES'] = expandvars(open(join(app_path, env["NGINX_INCLUDE_FILE"])).read(), env) if env.get("NGINX_INCLUDE_FILE") else ""
            env['PIKU_INTERNAL_NGINX_PORTMAP'] = ""
            if 'web' in workers or 'wsgi' in workers or 'jwsgi' in workers or 'rwsgi' in workers:
                env['PIKU_INTERNAL_NGINX_PORTMAP'] = expandvars(NGINX_PORTMAP_FRAGMENT, env)
            env['PIKU_INTERNAL_NGINX_COMMON'] = expandvars(NGINX_COMMON_FRAGMENT, env)

            echo("-----> nginx will map app '{}' to hostname(s) '{}'".format(app, env['NGINX_SERVER_NAME']))
            if ('NGINX_HTTPS_ONLY' in env) or ('HTTPS_ONLY' in env):
                buffer = expandvars(NGINX_HTTPS_ONLY_TEMPLATE, env)
                echo("-----> nginx will redirect all requests to hostname(s) '{}' to HTTPS".format(env['NGINX_SERVER_NAME']))
            else:
                buffer = expandvars(NGINX_TEMPLATE, env)
            with open(nginx_conf, "w") as h:
                h.write(buffer)
            # prevent broken config from breaking other deploys
            try:
                nginx_config_test = str(check_output("nginx -t 2>&1 | grep {}".format(app), env=environ, shell=True))
            except Exception:
                nginx_config_test = None
            if nginx_config_test:
                echo("Error: [nginx config] {}".format(nginx_config_test), fg='red')
                echo("Warning: removing broken nginx config.", fg='yellow')
                unlink(nginx_conf)

    # Configured worker count
    if exists(scaling):
        worker_count.update({k: int(v) for k, v in parse_procfile(scaling).items() if k in workers})

    to_create = {}
    to_destroy = {}
    for k, v in worker_count.items():
        to_create[k] = range(1, worker_count[k] + 1)
        if k in deltas and deltas[k]:
            to_create[k] = range(1, worker_count[k] + deltas[k] + 1)
            if deltas[k] < 0:
                to_destroy[k] = range(worker_count[k], worker_count[k] + deltas[k], -1)
            worker_count[k] = worker_count[k] + deltas[k]

    # Cleanup env
    for k, v in list(env.items()):
        if k.startswith('PIKU_INTERNAL_'):
            del env[k]

    # Save current settings
    write_config(live, env)
    write_config(scaling, worker_count, ':')

    if env.get('PIKU_AUTO_RESTART', 'true').lower() not in ['0', 'false']:
        config = glob(join(UWSGI_ENABLED, '{}*.ini'.format(app)))
        if len(config):
            echo("-----> Removing uwsgi configs to trigger auto-restart.")
            for c in config:
                remove(c)

    # Create new workers
    for k, v in to_create.items():
        for w in v:
            enabled = join(UWSGI_ENABLED, '{app:s}_{k:s}.{w:d}.ini'.format(**locals()))
            if not exists(enabled):
                echo("-----> spawning '{app:s}:{k:s}.{w:d}'".format(**locals()), fg='green')
                spawn_worker(app, k, workers[k], env, w)

    # Remove unnecessary workers (leave logfiles)
    for k, v in to_destroy.items():
        for w in v:  # lgtm [py/unused-loop-variable]
            enabled = join(UWSGI_ENABLED, '{app:s}_{k:s}.{w:d}.ini'.format(**locals()))
            if exists(enabled):
                echo("-----> terminating '{app:s}:{k:s}.{w:d}'".format(**locals()), fg='yellow')
                unlink(enabled)

    return env

def spawn_worker(app, kind, command, env, ordinal=1):
    """Set up and deploy a single worker of a given kind"""

    # pylint: disable=unused-variable
    env['PROC_TYPE'] = kind
    env_path = join(ENV_ROOT, app)
    available = join(UWSGI_AVAILABLE, '{app:s}_{kind:s}.{ordinal:d}.ini'.format(**locals()))
    enabled = join(UWSGI_ENABLED, '{app:s}_{kind:s}.{ordinal:d}.ini'.format(**locals()))
    log_file = join(LOG_ROOT, app, kind)

    settings = [
        ('chdir', join(APP_ROOT, app)),
        ('uid', getpwuid(getuid()).pw_name),
        ('gid', getgrgid(getgid()).gr_name),
        ('master', 'true'),
        ('project', app),
        ('max-requests', env.get('UWSGI_MAX_REQUESTS', '1024')),
        ('listen', env.get('UWSGI_LISTEN', '16')),
        ('processes', env.get('UWSGI_PROCESSES', '1')),
        ('procname-prefix', '{app:s}:{kind:s}'.format(**locals())),
        ('enable-threads', env.get('UWSGI_ENABLE_THREADS', 'true').lower()),
        ('log-x-forwarded-for', env.get('UWSGI_LOG_X_FORWARDED_FOR', 'false').lower()),
        ('log-maxsize', env.get('UWSGI_LOG_MAXSIZE', UWSGI_LOG_MAXSIZE)),
        ('logfile-chown', '%s:%s' % (getpwuid(getuid()).pw_name, getgrgid(getgid()).gr_name)),
        ('logfile-chmod', '640'),
        ('logto2', '{log_file:s}.{ordinal:d}.log'.format(**locals())),
        ('log-backupname', '{log_file:s}.{ordinal:d}.log.old'.format(**locals())),
    ]

    # only add virtualenv to uwsgi if it's a real virtualenv
    if exists(join(env_path, "bin", "activate_this.py")):
        settings.append(('virtualenv', env_path))

    if 'UWSGI_IDLE' in env:
        try:
            idle_timeout = int(env['UWSGI_IDLE'])
            settings.extend([
                ('idle', str(idle_timeout)),
                ('cheap', 'True'),
                ('die-on-idle', 'True')
            ])
            echo("-----> uwsgi will start workers on demand and kill them after {}s of inactivity".format(idle_timeout), fg='yellow')
        except Exception:
            echo("Error: malformed setting 'UWSGI_IDLE', ignoring it.".format(), fg='red')
            pass

    if kind == 'cron':
        settings.extend([
            ['cron', command.replace("*/", "-").replace("*", "-1")],
        ])

    if kind == 'jwsgi':
        settings.extend([
            ('module', command),
            ('threads', env.get('UWSGI_THREADS', '4')),
            ('plugin', 'jvm'),
            ('plugin', 'jwsgi')
        ])

    # could not come up with a better kind for ruby, web would work but that means loading the rack plugin in web.
    if kind == 'rwsgi':
        settings.extend([
            ('module', command),
            ('threads', env.get('UWSGI_THREADS', '4')),
            ('plugin', 'rack'),
            ('plugin', 'rbrequire'),
            ('plugin', 'post-buffering')
        ])

    python_version = int(env.get('PYTHON_VERSION', '3'))

    if kind == 'wsgi':
        settings.extend([
            ('module', command),
            ('threads', env.get('UWSGI_THREADS', '4')),
        ])

        if python_version == 2:
            settings.extend([
                ('plugin', 'python'),
            ])
            if 'UWSGI_GEVENT' in env:
                settings.extend([
                    ('plugin', 'gevent_python'),
                    ('gevent', env['UWSGI_GEVENT']),
                ])
            elif 'UWSGI_ASYNCIO' in env:
                try:
                    tasks = int(env['UWSGI_ASYNCIO'])
                    settings.extend([
                        ('plugin', 'asyncio_python'),
                        ('async', tasks),
                    ])
                    echo("-----> uwsgi will support {} async tasks".format(tasks), fg='yellow')
                except ValueError:
                    echo("Error: malformed setting 'UWSGI_ASYNCIO', ignoring it.".format(), fg='red')
        elif python_version == 3:
            settings.extend([
                ('plugin', 'python3'),
            ])
            if 'UWSGI_ASYNCIO' in env:
                try:
                    tasks = int(env['UWSGI_ASYNCIO'])
                    settings.extend([
                        ('plugin', 'asyncio_python3'),
                        ('async', tasks),
                    ])
                    echo("-----> uwsgi will support {} async tasks".format(tasks), fg='yellow')
                except ValueError:
                    echo("Error: malformed setting 'UWSGI_ASYNCIO', ignoring it.".format(), fg='red')

        # If running under nginx, don't expose a port at all
        if 'NGINX_SERVER_NAME' in env:
            sock = join(NGINX_ROOT, "{}.sock".format(app))
            echo("-----> nginx will talk to uWSGI via {}".format(sock), fg='yellow')
            settings.extend([
                ('socket', sock),
                ('chmod-socket', '664'),
            ])
        else:
            echo("-----> nginx will talk to uWSGI via {BIND_ADDRESS:s}:{PORT:s}".format(**env), fg='yellow')
            settings.extend([
                ('http', '{BIND_ADDRESS:s}:{PORT:s}'.format(**env)),
                ('http-use-socket', '{BIND_ADDRESS:s}:{PORT:s}'.format(**env)),
                ('http-socket', '{BIND_ADDRESS:s}:{PORT:s}'.format(**env)),
            ])
    elif kind == 'web':
        echo("-----> nginx will talk to the 'web' process via {BIND_ADDRESS:s}:{PORT:s}".format(**env), fg='yellow')
        settings.append(('attach-daemon', command))
    elif kind == 'static':
        echo("-----> nginx serving static files only".format(**env), fg='yellow')
    elif kind == 'cron':
        echo("-----> uwsgi scheduled cron for {command}".format(**locals()), fg='yellow')
    else:
        settings.append(('attach-daemon', command))

    if kind in ['wsgi', 'web']:
        settings.append(('log-format', '%%(addr) - %%(user) [%%(ltime)] "%%(method) %%(uri) %%(proto)" %%(status) %%(size) "%%(referer)" "%%(uagent)" %%(msecs)ms'))

    # remove unnecessary variables from the env in nginx.ini
    for k in ['NGINX_ACL']:
        if k in env:
            del env[k]

    # insert user defined uwsgi settings if set
    settings += parse_settings(join(APP_ROOT, app, env.get("UWSGI_INCLUDE_FILE"))).items() if env.get("UWSGI_INCLUDE_FILE") else []

    for k, v in env.items():
        settings.append(('env', '{k:s}={v}'.format(**locals())))

    if kind != 'static':
        with open(available, 'w') as h:
            h.write('[uwsgi]\n')
            for k, v in settings:
                h.write("{k:s} = {v}\n".format(**locals()))
        copyfile(available, enabled)
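# For reference, a spawned 'web' worker ends up as an emperor vassal ini roughly like the
# following (illustrative only; paths and values depend on the app and its ENV):
#   [uwsgi]
#   chdir = /home/piku/.piku/apps/myapp
#   master = true
#   attach-daemon = node server.js
#   env = PORT=8000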

def do_restart(app):
    """Restarts a deployed app"""

    config = glob(join(UWSGI_ENABLED, '{}*.ini'.format(app)))
    if len(config) > 0:
        echo("Restarting app '{}'...".format(app), fg='yellow')
        for c in config:
            remove(c)
        spawn_app(app)
    else:
        echo("Error: app '{}' not deployed!".format(app), fg='red')

def multi_tail(app, filenames, catch_up=20):
    """Tails multiple log files"""

    # Seek helper
    def peek(handle):
        where = handle.tell()
        line = handle.readline()
        if not line:
            handle.seek(where)
            return None
        return line

    inodes = {}
    files = {}
    prefixes = {}

    # Set up current state for each log file
    for f in filenames:
        prefixes[f] = splitext(basename(f))[0]
        files[f] = open(f, "rt", encoding="utf-8", errors="ignore")
        inodes[f] = stat(f).st_ino
        files[f].seek(0, 2)

    longest = max(map(len, prefixes.values()))

    # Grab a little history (if any)
    for f in filenames:
        for line in deque(open(f, "rt", encoding="utf-8", errors="ignore"), catch_up):
            yield "{} | {}".format(prefixes[f].ljust(longest), line)

    while True:
        updated = False
        # Check for updates on every file
        for f in filenames:
            line = peek(files[f])
            if line:
                updated = True
                yield "{} | {}".format(prefixes[f].ljust(longest), line)

        if not updated:
            sleep(1)
            # Check if logs rotated
            for f in filenames:
                if exists(f):
                    if stat(f).st_ino != inodes[f]:
                        files[f] = open(f)
                        inodes[f] = stat(f).st_ino
                else:
                    filenames.remove(f)

# === CLI commands ===

CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])


@group(context_settings=CONTEXT_SETTINGS)
def piku():
    """The smallest PaaS you've ever seen"""
    pass


@piku.resultcallback()
def cleanup(ctx):
    """Callback from command execution -- add debugging to taste"""
    pass

# --- User commands ---


@piku.command("apps")
def cmd_apps():
    """List apps, e.g.: piku apps"""

    apps = listdir(APP_ROOT)
    if not apps:
        echo("There are no applications deployed.")
        return

    for a in apps:
        running = len(glob(join(UWSGI_ENABLED, '{}*.ini'.format(a)))) != 0
        echo(('*' if running else ' ') + a, fg='green')


@piku.command("config")
@argument('app')
def cmd_config(app):
    """Show config, e.g.: piku config <app>"""

    app = exit_if_invalid(app)

    config_file = join(ENV_ROOT, app, 'ENV')
    if exists(config_file):
        echo(open(config_file).read().strip(), fg='white')
    else:
        echo("Warning: app '{}' not deployed, no config found.".format(app), fg='yellow')


@piku.command("config:get")
@argument('app')
@argument('setting')
def cmd_config_get(app, setting):
    """e.g.: piku config:get <app> FOO"""

    app = exit_if_invalid(app)

    config_file = join(ENV_ROOT, app, 'ENV')
    if exists(config_file):
        env = parse_settings(config_file)
        if setting in env:
            echo("{}".format(env[setting]), fg='white')
    else:
        echo("Warning: no active configuration for '{}'".format(app))


@piku.command("config:set")
@argument('app')
@argument('settings', nargs=-1)
def cmd_config_set(app, settings):
    """e.g.: piku config:set <app> FOO=bar BAZ=quux"""

    app = exit_if_invalid(app)

    config_file = join(ENV_ROOT, app, 'ENV')
    env = parse_settings(config_file)
    for s in settings:
        try:
            k, v = map(lambda x: x.strip(), s.split("=", 1))
            env[k] = v
            echo("Setting {k:s}={v} for '{app:s}'".format(**locals()), fg='white')
        except Exception:
            echo("Error: malformed setting '{}'".format(s), fg='red')
            return
    write_config(config_file, env)
    do_deploy(app)

@piku.command("config:unset")
@argument('app')
@argument('settings', nargs=-1)
def cmd_config_unset(app, settings):
    """e.g.: piku config:unset <app> FOO"""

    app = exit_if_invalid(app)

    config_file = join(ENV_ROOT, app, 'ENV')
    env = parse_settings(config_file)
    for s in settings:
        if s in env:
            del env[s]
            echo("Unsetting {} for '{}'".format(s, app), fg='white')
    write_config(config_file, env)
    do_deploy(app)


@piku.command("config:live")
@argument('app')
def cmd_config_live(app):
    """e.g.: piku config:live <app>"""

    app = exit_if_invalid(app)

    live_config = join(ENV_ROOT, app, 'LIVE_ENV')
    if exists(live_config):
        echo(open(live_config).read().strip(), fg='white')
    else:
        echo("Warning: app '{}' not deployed, no config found.".format(app), fg='yellow')


@piku.command("deploy")
@argument('app')
def cmd_deploy(app):
    """e.g.: piku deploy <app>"""

    app = exit_if_invalid(app)
    do_deploy(app)

@piku.command("destroy")
@argument('app')
def cmd_destroy(app):
    """e.g.: piku destroy <app>"""

    app = exit_if_invalid(app)

    for p in [join(x, app) for x in [APP_ROOT, GIT_ROOT, ENV_ROOT, LOG_ROOT]]:
        if exists(p):
            echo("Removing folder '{}'".format(p), fg='yellow')
            rmtree(p)

    for p in [join(x, '{}*.ini'.format(app)) for x in [UWSGI_AVAILABLE, UWSGI_ENABLED]]:
        g = glob(p)
        if len(g) > 0:
            for f in g:
                echo("Removing file '{}'".format(f), fg='yellow')
                remove(f)

    nginx_files = [join(NGINX_ROOT, "{}.{}".format(app, x)) for x in ['conf', 'sock', 'key', 'crt']]
    for f in nginx_files:
        if exists(f):
            echo("Removing file '{}'".format(f), fg='yellow')
            remove(f)

    acme_link = join(ACME_WWW, app)
    acme_certs = realpath(acme_link)
    if exists(acme_certs):
        echo("Removing folder '{}'".format(acme_certs), fg='yellow')
        rmtree(acme_certs)
        echo("Removing file '{}'".format(acme_link), fg='yellow')
        unlink(acme_link)


@piku.command("logs")
@argument('app')
@argument('process', nargs=1, default='*')
def cmd_logs(app, process):
    """Tail running logs, e.g: piku logs <app> [<process>]"""

    app = exit_if_invalid(app)

    logfiles = glob(join(LOG_ROOT, app, process + '.*.log'))
    if len(logfiles) > 0:
        for line in multi_tail(app, logfiles):
            echo(line.strip(), fg='white')
    else:
        echo("No logs found for app '{}'.".format(app), fg='yellow')

@piku.command("ps")
@argument('app')
def cmd_ps(app):
    """Show process count, e.g: piku ps <app>"""

    app = exit_if_invalid(app)

    config_file = join(ENV_ROOT, app, 'SCALING')
    if exists(config_file):
        echo(open(config_file).read().strip(), fg='white')
    else:
        echo("Error: no workers found for app '{}'.".format(app), fg='red')


@piku.command("ps:scale")
@argument('app')
@argument('settings', nargs=-1)
def cmd_ps_scale(app, settings):
    """e.g.: piku ps:scale <app> <proc>=<count>"""

    app = exit_if_invalid(app)

    config_file = join(ENV_ROOT, app, 'SCALING')
    worker_count = {k: int(v) for k, v in parse_procfile(config_file).items()}
    deltas = {}
    for s in settings:
        try:
            k, v = map(lambda x: x.strip(), s.split("=", 1))
            c = int(v)  # check for integer value
            if c < 0:
                echo("Error: cannot scale type '{}' below 0".format(k), fg='red')
                return
            if k not in worker_count:
                echo("Error: worker type '{}' not present in '{}'".format(k, app), fg='red')
                return
            deltas[k] = c - worker_count[k]
        except Exception:
            echo("Error: malformed setting '{}'".format(s), fg='red')
            return
    do_deploy(app, deltas)
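# Example (illustrative): `piku ps:scale myapp web=2 worker=0` computes the deltas against the
# app's SCALING file and redeploys, spawning or removing uwsgi vassal configs to match the new counts.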

@piku.command("run")
@argument('app')
@argument('cmd', nargs=-1)
def cmd_run(app, cmd):
    """e.g.: piku run <app> ls -- -al"""

    app = exit_if_invalid(app)

    config_file = join(ENV_ROOT, app, 'LIVE_ENV')
    environ.update(parse_settings(config_file))
    for f in [stdout, stderr]:
        fl = fcntl(f, F_GETFL)
        fcntl(f, F_SETFL, fl | O_NONBLOCK)
    p = Popen(' '.join(cmd), stdin=stdin, stdout=stdout, stderr=stderr, env=environ, cwd=join(APP_ROOT, app), shell=True)
    p.communicate()


@piku.command("restart")
@argument('app')
def cmd_restart(app):
    """Restart an app: piku restart <app>"""

    app = exit_if_invalid(app)
    do_restart(app)

@piku.command("setup")
def cmd_setup():
    """Initialize environment"""

    echo("Running in Python {}".format(".".join(map(str, version_info))))

    # Create required paths
    for p in [APP_ROOT, GIT_ROOT, ENV_ROOT, UWSGI_ROOT, UWSGI_AVAILABLE, UWSGI_ENABLED, LOG_ROOT, NGINX_ROOT]:
        if not exists(p):
            echo("Creating '{}'.".format(p), fg='green')
            makedirs(p)

    # Set up the uWSGI emperor config
    settings = [
        ('chdir', UWSGI_ROOT),
        ('emperor', UWSGI_ENABLED),
        ('log-maxsize', UWSGI_LOG_MAXSIZE),
        ('logto', join(UWSGI_ROOT, 'uwsgi.log')),
        ('log-backupname', join(UWSGI_ROOT, 'uwsgi.old.log')),
        ('socket', join(UWSGI_ROOT, 'uwsgi.sock')),
        ('uid', getpwuid(getuid()).pw_name),
        ('gid', getgrgid(getgid()).gr_name),
        ('enable-threads', 'true'),
        ('threads', '{}'.format(cpu_count() * 2)),
    ]
    with open(join(UWSGI_ROOT, 'uwsgi.ini'), 'w') as h:
        h.write('[uwsgi]\n')
        # pylint: disable=unused-variable
        for k, v in settings:
            h.write("{k:s} = {v}\n".format(**locals()))

    # mark this script as executable (in case we were invoked via interpreter)
    if not (stat(PIKU_SCRIPT).st_mode & S_IXUSR):
        echo("Setting '{}' as executable.".format(PIKU_SCRIPT), fg='yellow')
        chmod(PIKU_SCRIPT, stat(PIKU_SCRIPT).st_mode | S_IXUSR)

@piku.command("setup:ssh")
@argument('public_key_file')
def cmd_setup_ssh(public_key_file):
    """Set up a new SSH key (use - for stdin)"""

    def add_helper(key_file):
        if exists(key_file):
            try:
                fingerprint = str(check_output('ssh-keygen -lf ' + key_file, shell=True)).split(' ', 4)[1]
                key = open(key_file, 'r').read().strip()
                echo("Adding key '{}'.".format(fingerprint), fg='white')
                setup_authorized_keys(fingerprint, PIKU_SCRIPT, key)
            except Exception:
                echo("Error: invalid public key file '{}': {}".format(key_file, format_exc()), fg='red')
        elif public_key_file == '-':
            buffer = "".join(stdin.readlines())
            with NamedTemporaryFile(mode="w") as f:
                f.write(buffer)
                f.flush()
                add_helper(f.name)
        else:
            echo("Error: public key file '{}' not found.".format(key_file), fg='red')

    add_helper(public_key_file)

@piku.command("stop")
@argument('app')
def cmd_stop(app):
    """Stop an app, e.g: piku stop <app>"""

    app = exit_if_invalid(app)
    config = glob(join(UWSGI_ENABLED, '{}*.ini'.format(app)))

    if len(config) > 0:
        echo("Stopping app '{}'...".format(app), fg='yellow')
        for c in config:
            remove(c)
    else:
        echo("Error: app '{}' not deployed!".format(app), fg='red')

# --- Internal commands ---


@piku.command("git-hook")
@argument('app')
def cmd_git_hook(app):
    """INTERNAL: Post-receive git hook"""

    app = sanitize_app_name(app)
    repo_path = join(GIT_ROOT, app)
    app_path = join(APP_ROOT, app)

    for line in stdin:
        # pylint: disable=unused-variable
        oldrev, newrev, refname = line.strip().split(" ")
        # Handle pushes
        if not exists(app_path):
            echo("-----> Creating app '{}'".format(app), fg='green')
            makedirs(app_path)
            call('git clone --quiet {} {}'.format(repo_path, app), cwd=APP_ROOT, shell=True)
        do_deploy(app, newrev=newrev)

@piku.command("git-receive-pack")
@argument('app')
def cmd_git_receive_pack(app):
    """INTERNAL: Handle git pushes for an app"""

    app = sanitize_app_name(app)
    hook_path = join(GIT_ROOT, app, 'hooks', 'post-receive')
    env = globals()
    env.update(locals())

    if not exists(hook_path):
        makedirs(dirname(hook_path))
        # Initialize the repository with a hook to this script
        call("git init --quiet --bare " + app, cwd=GIT_ROOT, shell=True)
        with open(hook_path, 'w') as h:
            h.write("""#!/usr/bin/env bash
set -e; set -o pipefail;
cat | PIKU_ROOT="{PIKU_ROOT:s}" {PIKU_SCRIPT:s} git-hook {app:s}""".format(**env))
        # Make the hook executable by our user
        chmod(hook_path, stat(hook_path).st_mode | S_IXUSR)

    # Handle the actual receive. We'll be called with 'git-hook' after it happens
    call('git-shell -c "{}" '.format(argv[1] + " '{}'".format(app)), cwd=GIT_ROOT, shell=True)
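# Client side (illustrative), relying on the forced command installed by setup_authorized_keys:
#   git remote add piku piku@your-server:myapp
#   git push piku master
# The push lands in git-receive-pack above, whose post-receive hook calls `piku git-hook myapp` to deploy.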

@piku.command("git-upload-pack")
@argument('app')
def cmd_git_upload_pack(app):
    """INTERNAL: Handle git upload pack for an app"""

    app = sanitize_app_name(app)
    env = globals()
    env.update(locals())
    # Handle the actual receive. We'll be called with 'git-hook' after it happens
    call('git-shell -c "{}" '.format(argv[1] + " '{}'".format(app)), cwd=GIT_ROOT, shell=True)


def _get_plugin_commands(path):
    sys_path.append(abspath(path))
    cli_commands = []
    if isdir(path):
        for item in listdir(path):
            module_path = join(path, item)
            if isdir(module_path):
                try:
                    module = import_module(item)
                except Exception:
                    module = None
                if hasattr(module, 'cli_commands'):
                    cli_commands.append(module.cli_commands())
    return cli_commands

@piku.command("help")
@pass_context
def cmd_help(ctx):
    """display help for piku"""
    echo(ctx.parent.get_help())


@piku.command("update")
def cmd_update():
    """Update the piku cli"""
    echo("Updating piku...")
    with NamedTemporaryFile(mode="w") as f:
        tempfile = f.name
        cmd = """curl -sL -w %{{http_code}} {} -o {}""".format(PIKU_RAW_SOURCE_URL, tempfile)
        response = check_output(cmd.split(' '), stderr=STDOUT)
        http_code = response.decode('utf8').strip()
        if http_code == "200":
            copyfile(tempfile, PIKU_SCRIPT)
            echo("Update successful.")
        else:
            echo("Error updating piku - please check if {} is accessible from this machine.".format(PIKU_RAW_SOURCE_URL))
    echo("Done.")

if __name__ == '__main__':
    cli_commands = _get_plugin_commands(path=PIKU_PLUGIN_ROOT)
    cli_commands.append(piku)
    cli = CommandCollection(sources=cli_commands)
    cli()