2018-06-14 18:37:53 +00:00
#!/usr/bin/env python3

"Piku Micro-PaaS"

from sys import version_info

# Validate the interpreter with an explicit check instead of `assert`:
# asserts are stripped when Python runs with -O, which would silently
# skip the version guard.
if version_info < (3, 7):
    exit("Piku requires Python 3.7 or above")
2018-07-24 20:12:08 +00:00
2019-11-17 02:38:33 +00:00
from importlib import import_module
2016-04-01 23:16:10 +00:00
from collections import defaultdict , deque
2016-09-10 09:37:37 +00:00
from fcntl import fcntl , F_SETFL , F_GETFL
2016-04-01 23:16:10 +00:00
from glob import glob
2016-05-05 19:00:25 +00:00
from json import loads
2016-04-04 08:12:25 +00:00
from multiprocessing import cpu_count
2019-11-27 16:05:52 +00:00
from os import chmod , getgid , getuid , symlink , unlink , remove , stat , listdir , environ , makedirs , O_NONBLOCK
2019-11-27 23:17:40 +00:00
from os . path import abspath , basename , dirname , exists , getmtime , join , realpath , splitext , isdir
2019-11-20 08:28:02 +00:00
from pwd import getpwuid
2019-11-27 16:05:52 +00:00
from grp import getgrgid
2021-03-07 19:40:52 +00:00
from re import sub , match
2018-06-24 20:54:29 +00:00
from shutil import copyfile , rmtree , which
2016-09-10 09:37:37 +00:00
from socket import socket , AF_INET , SOCK_STREAM
from stat import S_IRUSR , S_IWUSR , S_IXUSR
2019-11-20 08:28:02 +00:00
from subprocess import call , check_output , Popen , STDOUT
2019-11-27 13:34:12 +00:00
from sys import argv , stdin , stdout , stderr , version_info , exit , path as sys_path
2017-05-14 22:34:29 +00:00
from tempfile import NamedTemporaryFile
2016-03-29 20:57:51 +00:00
from time import sleep
2019-11-20 08:28:02 +00:00
from traceback import format_exc
2018-06-24 20:54:29 +00:00
from urllib . request import urlopen
2019-11-20 08:28:02 +00:00
2019-11-26 04:35:11 +00:00
from click import argument , group , secho as echo , pass_context , CommandCollection
2018-06-24 20:54:29 +00:00
# === Make sure we can access all system binaries ===
# Non-login shells (e.g. SSH forced commands) often omit the sbin
# directories; prepend them so system daemental tooling resolves on PATH.
if 'sbin' not in environ['PATH']:
    environ['PATH'] = "/usr/local/sbin:/usr/sbin:/sbin:" + environ['PATH']
2016-03-26 12:52:54 +00:00
2016-03-29 20:18:38 +00:00
# === Globals - all tweakable settings are here ===
2016-03-27 12:12:13 +00:00
2021-01-21 18:52:03 +00:00
PIKU_RAW_SOURCE_URL = " https://raw.githubusercontent.com/piku/piku/master/piku.py "
2019-11-20 08:28:02 +00:00
PIKU_ROOT = environ . get ( ' PIKU_ROOT ' , join ( environ [ ' HOME ' ] , ' .piku ' ) )
PIKU_BIN = join ( environ [ ' HOME ' ] , ' bin ' )
2018-12-23 14:01:25 +00:00
PIKU_SCRIPT = realpath ( __file__ )
2021-12-08 16:17:53 +00:00
PIKU_PLUGIN_ROOT = abspath ( join ( PIKU_ROOT , " plugins " ) )
2016-03-27 12:08:30 +00:00
APP_ROOT = abspath ( join ( PIKU_ROOT , " apps " ) )
2022-12-27 15:11:31 +00:00
DATA_ROOT = abspath ( join ( PIKU_ROOT , " data " ) )
2016-03-27 12:20:44 +00:00
ENV_ROOT = abspath ( join ( PIKU_ROOT , " envs " ) )
2016-03-27 12:08:30 +00:00
GIT_ROOT = abspath ( join ( PIKU_ROOT , " repos " ) )
LOG_ROOT = abspath ( join ( PIKU_ROOT , " logs " ) )
2016-04-25 09:10:07 +00:00
NGINX_ROOT = abspath ( join ( PIKU_ROOT , " nginx " ) )
2022-12-27 15:11:31 +00:00
CACHE_ROOT = abspath ( join ( PIKU_ROOT , " cache " ) )
2016-03-28 22:37:36 +00:00
UWSGI_AVAILABLE = abspath ( join ( PIKU_ROOT , " uwsgi-available " ) )
UWSGI_ENABLED = abspath ( join ( PIKU_ROOT , " uwsgi-enabled " ) )
UWSGI_ROOT = abspath ( join ( PIKU_ROOT , " uwsgi " ) )
2016-04-04 08:12:25 +00:00
UWSGI_LOG_MAXSIZE = ' 1048576 '
2019-11-20 08:28:02 +00:00
ACME_ROOT = environ . get ( ' ACME_ROOT ' , join ( environ [ ' HOME ' ] , ' .acme.sh ' ) )
2019-06-23 14:47:58 +00:00
ACME_WWW = abspath ( join ( PIKU_ROOT , " acme " ) )
2022-03-07 09:01:13 +00:00
ACME_ROOT_CA = environ . get ( ' ACME_ROOT_CA ' , ' letsencrypt.org ' )
2018-12-23 15:31:55 +00:00
2019-09-29 17:43:07 +00:00
# === Make sure we can access piku user-installed binaries === #
# ~/bin holds user-installed helpers; put it first so they win over system copies.
if PIKU_BIN not in environ['PATH']:
    environ['PATH'] = PIKU_BIN + ":" + environ['PATH']
2018-12-23 15:31:55 +00:00
# pylint: disable=anomalous-backslash-in-string
2016-04-25 09:10:07 +00:00
NGINX_TEMPLATE = """
2022-12-27 15:11:31 +00:00
$ PIKU_INTERNAL_PROXY_CACHE_PATH
2016-04-25 09:10:07 +00:00
upstream $ APP {
2016-05-03 21:18:03 +00:00
server $ NGINX_SOCKET ;
2016-04-25 09:10:07 +00:00
}
server {
2018-12-23 13:59:48 +00:00
listen $ NGINX_IPV6_ADDRESS : 80 ;
listen $ NGINX_IPV4_ADDRESS : 80 ;
2016-04-25 12:23:00 +00:00
2019-06-23 14:47:58 +00:00
location ^ ~ / . well - known / acme - challenge {
allow all ;
root $ { ACME_WWW } ;
}
2021-12-07 09:23:34 +00:00
$ PIKU_INTERNAL_NGINX_COMMON
2016-04-25 09:10:07 +00:00
}
"""
2016-03-27 12:12:13 +00:00
2018-08-29 15:51:04 +00:00
NGINX_HTTPS_ONLY_TEMPLATE = """
2022-12-27 15:11:31 +00:00
$ PIKU_INTERNAL_PROXY_CACHE_PATH
2018-08-29 15:51:04 +00:00
upstream $ APP {
server $ NGINX_SOCKET ;
}
server {
2018-12-23 13:59:48 +00:00
listen $ NGINX_IPV6_ADDRESS : 80 ;
listen $ NGINX_IPV4_ADDRESS : 80 ;
2018-08-29 15:51:04 +00:00
server_name $ NGINX_SERVER_NAME ;
2019-06-23 14:47:58 +00:00
location ^ ~ / . well - known / acme - challenge {
allow all ;
root $ { ACME_WWW } ;
}
2019-09-27 06:29:21 +00:00
location / {
return 301 https : / / $ server_name $ request_uri ;
}
2018-08-29 15:51:04 +00:00
}
server {
2021-12-07 09:23:34 +00:00
$ PIKU_INTERNAL_NGINX_COMMON
2019-08-05 08:09:11 +00:00
}
"""
# pylint: enable=anomalous-backslash-in-string
NGINX_COMMON_FRAGMENT = """
2018-12-23 13:59:48 +00:00
listen $ NGINX_IPV6_ADDRESS : $ NGINX_SSL ;
listen $ NGINX_IPV4_ADDRESS : $ NGINX_SSL ;
2018-08-29 15:51:04 +00:00
ssl_certificate $ NGINX_ROOT / $ APP . crt ;
ssl_certificate_key $ NGINX_ROOT / $ APP . key ;
server_name $ NGINX_SERVER_NAME ;
# These are not required under systemd - enable for debugging only
# access_log $LOG_ROOT/$APP/access.log;
# error_log $LOG_ROOT/$APP/error.log;
2019-11-19 08:09:08 +00:00
2018-08-29 15:51:04 +00:00
# Enable gzip compression
gzip on ;
gzip_proxied any ;
2022-04-16 05:30:16 +00:00
gzip_types text / plain text / xml text / css text / javascript text / js application / x - javascript application / javascript application / json application / xml + rss application / atom + xml image / svg + xml ;
2018-08-29 15:51:04 +00:00
gzip_comp_level 7 ;
gzip_min_length 2048 ;
gzip_vary on ;
2019-11-20 19:41:04 +00:00
gzip_disable " MSIE [1-6] \ .(?!.*SV1) " ;
2018-08-29 15:51:04 +00:00
# set a custom header for requests
add_header X - Deployed - By Piku ;
2021-12-07 09:23:34 +00:00
$ PIKU_INTERNAL_NGINX_CUSTOM_CLAUSES
$ PIKU_INTERNAL_NGINX_STATIC_MAPPINGS
2022-12-27 15:11:31 +00:00
$ PIKU_INTERNAL_NGINX_CACHE_MAPPINGS
2021-12-07 09:23:34 +00:00
$ PIKU_INTERNAL_NGINX_BLOCK_GIT
$ PIKU_INTERNAL_NGINX_PORTMAP
2019-08-30 10:07:55 +00:00
"""
NGINX_PORTMAP_FRAGMENT = """
2018-08-29 15:51:04 +00:00
location / {
2021-12-07 09:23:34 +00:00
$ PIKU_INTERNAL_NGINX_UWSGI_SETTINGS
2018-08-29 15:51:04 +00:00
proxy_http_version 1.1 ;
proxy_set_header Upgrade $ http_upgrade ;
proxy_set_header Connection " upgrade " ;
2020-03-31 17:59:29 +00:00
proxy_set_header Host $ host ;
2018-08-29 15:51:04 +00:00
proxy_set_header X - Forwarded - Proto $ scheme ;
2020-03-19 17:31:33 +00:00
proxy_set_header X - Forwarded - For $ proxy_add_x_forwarded_for ;
proxy_set_header X - Remote - Address $ remote_addr ;
2018-08-29 15:51:04 +00:00
proxy_set_header X - Forwarded - Port $ server_port ;
proxy_set_header X - Request - Start $ msec ;
$ NGINX_ACL
2019-08-30 10:07:55 +00:00
}
2018-08-29 15:51:04 +00:00
"""
2019-06-23 14:47:58 +00:00
NGINX_ACME_FIRSTRUN_TEMPLATE = """
server {
listen $ NGINX_IPV6_ADDRESS : 80 ;
listen $ NGINX_IPV4_ADDRESS : 80 ;
server_name $ NGINX_SERVER_NAME ;
location ^ ~ / . well - known / acme - challenge {
allow all ;
root $ { ACME_WWW } ;
}
}
"""
2021-12-07 09:23:34 +00:00
PIKU_INTERNAL_NGINX_STATIC_MAPPING = """
2019-06-23 14:44:57 +00:00
location $ static_url {
2016-10-02 17:44:28 +00:00
sendfile on ;
2016-10-02 17:40:02 +00:00
sendfile_max_chunk 1 m ;
2016-10-02 17:44:28 +00:00
tcp_nopush on ;
directio 8 m ;
aio threads ;
2019-06-23 14:44:57 +00:00
alias $ static_path ;
2021-01-13 00:04:49 +00:00
try_files $ uri $ uri . html $ uri / = 404 ;
2016-10-02 17:40:02 +00:00
}
"""
2022-12-27 15:11:31 +00:00
PIKU_INTERNAL_PROXY_CACHE_PATH = """
2022-12-30 18:35:13 +00:00
uwsgi_cache_path $ cache_path levels = 1 : 2 keys_zone = $ app : 20 m inactive = $ cache_time_expiry max_size = $ cache_size use_temp_path = off ;
2022-12-27 15:11:31 +00:00
"""
PIKU_INTERNAL_NGINX_CACHE_MAPPING = """
location ~ * ^ / ( $ cache_prefixes ) {
uwsgi_cache $ APP ;
uwsgi_cache_min_uses 1 ;
uwsgi_cache_key $ host $ uri ;
2022-12-30 18:35:13 +00:00
uwsgi_cache_valid 200 304 $ cache_time_content ;
uwsgi_cache_valid 301 307 $ cache_time_redirects ;
2022-12-27 15:11:31 +00:00
uwsgi_cache_valid 500 502 503 504 0 s ;
2022-12-30 18:35:13 +00:00
uwsgi_cache_valid any $ cache_time_any ;
2022-12-27 15:11:31 +00:00
uwsgi_hide_header Cache - Control ;
2022-12-30 18:35:13 +00:00
add_header Cache - Control " public, max-age=$cache_time_control " ;
2022-12-27 15:11:31 +00:00
add_header X - Cache $ upstream_cache_status ;
$ PIKU_INTERNAL_NGINX_UWSGI_SETTINGS
}
"""
2021-12-07 09:23:34 +00:00
PIKU_INTERNAL_NGINX_UWSGI_SETTINGS = """
2018-08-22 18:41:31 +00:00
uwsgi_pass $ APP ;
uwsgi_param QUERY_STRING $ query_string ;
uwsgi_param REQUEST_METHOD $ request_method ;
uwsgi_param CONTENT_TYPE $ content_type ;
uwsgi_param CONTENT_LENGTH $ content_length ;
uwsgi_param REQUEST_URI $ request_uri ;
uwsgi_param PATH_INFO $ document_uri ;
uwsgi_param DOCUMENT_ROOT $ document_root ;
uwsgi_param SERVER_PROTOCOL $ server_protocol ;
2023-01-16 23:22:49 +00:00
uwsgi_param X_FORWARDED_FOR $ proxy_add_x_forwarded_for ;
2018-08-22 18:41:31 +00:00
uwsgi_param REMOTE_ADDR $ remote_addr ;
uwsgi_param REMOTE_PORT $ remote_port ;
uwsgi_param SERVER_ADDR $ server_addr ;
uwsgi_param SERVER_PORT $ server_port ;
uwsgi_param SERVER_NAME $ server_name ;
"""
2021-03-07 19:40:52 +00:00
CRON_REGEXP = " ^((?:(?: \ * \ /)? \ d+)| \ *) ((?:(?: \ * \ /)? \ d+)| \ *) ((?:(?: \ * \ /)? \ d+)| \ *) ((?:(?: \ * \ /)? \ d+)| \ *) ((?:(?: \ * \ /)? \ d+)| \ *) (.*)$ "
2016-03-29 20:18:38 +00:00
# === Utility functions ===
2016-03-26 12:52:54 +00:00
2023-01-07 18:05:02 +00:00
2016-03-26 21:33:02 +00:00
def sanitize_app_name(app):
    """Reduce an app name to a safe character set (alnum plus '.', '_', '-')."""

    allowed_punct = ('.', '_', '-')
    kept = [ch for ch in app if ch.isalnum() or ch in allowed_punct]
    return "".join(kept).rstrip().lstrip('/')
2016-03-26 17:14:13 +00:00
2016-04-06 09:09:05 +00:00
def exit_if_invalid(app):
    """Utility function for error checking upon command startup.

    Returns the sanitized app name, or exits with status 1 when no such
    app directory exists.
    """
    app = sanitize_app_name(app)
    app_dir = join(APP_ROOT, app)
    if exists(app_dir):
        return app
    echo("Error: app '{}' not found.".format(app), fg='red')
    exit(1)
2016-03-26 12:52:54 +00:00
def get_free_port(address=""):
    """Ask the OS for an ephemeral TCP port and return its number."""

    probe = socket(AF_INET, SOCK_STREAM)
    try:
        # Binding to port 0 makes the kernel pick any free port.
        probe.bind((address, 0))  # lgtm [py/bind-socket-all-network-interfaces]
        return probe.getsockname()[1]
    finally:
        probe.close()
2016-03-26 17:28:01 +00:00
2023-03-25 04:30:29 +00:00
def get_boolean(value):
    """Interpret common truthy strings ('1', 'on', 'true', ...) as True."""
    truthy = {'1', 'on', 'true', 'enabled', 'yes', 'y'}
    return value.lower() in truthy
2016-04-02 16:38:53 +00:00
def write_config(filename, bag, separator='='):
    """Write each key/value pair in *bag* to *filename*, one per line,
    joined by *separator* (defaults to '=')."""

    with open(filename, 'w') as handle:
        for key, value in bag.items():
            handle.write('{:s}{:s}{}\n'.format(key, separator, value))
2016-03-26 12:52:54 +00:00
def setup_authorized_keys(ssh_fingerprint, script_path, pubkey):
    """Append a forced-command entry to ~/.ssh/authorized_keys so that every
    SSH login with this key is routed through the piku script."""

    ssh_dir = join(environ['HOME'], '.ssh')
    authorized_keys = join(ssh_dir, 'authorized_keys')
    if not exists(ssh_dir):
        makedirs(ssh_dir)
    # Restrict features and force all SSH commands to go through our script
    entry = ("""command="FINGERPRINT={ssh_fingerprint:s} NAME=default {script_path:s} $SSH_ORIGINAL_COMMAND",no-agent-forwarding,no-user-rc,no-X11-forwarding,no-port-forwarding {pubkey:s}\n"""
             .format(ssh_fingerprint=ssh_fingerprint, script_path=script_path, pubkey=pubkey))
    with open(authorized_keys, 'a') as handle:
        handle.write(entry)
    # Tighten permissions: sshd refuses world/group-readable key material.
    chmod(ssh_dir, S_IRUSR | S_IWUSR | S_IXUSR)
    chmod(authorized_keys, S_IRUSR | S_IWUSR)
2016-04-03 16:14:15 +00:00
2016-03-27 12:12:13 +00:00
2016-03-29 19:24:17 +00:00
def parse_procfile(filename):
    """Parses a Procfile and returns the worker types. Only one worker of each type is allowed.

    Returns None when the file does not exist, {} when it contains no valid
    entries, otherwise a {kind: command} mapping.
    """

    workers = {}
    if not exists(filename):
        return None

    with open(filename, 'r') as procfile:
        for line_number, line in enumerate(procfile):
            line = line.strip()
            # Skip comments and blank lines.
            if line.startswith("#") or not line:
                continue
            try:
                kind, command = map(lambda x: x.strip(), line.split(":", 1))
                # Check for cron patterns
                if kind == "cron":
                    # Per-field upper bounds: minute, hour, day, month, weekday.
                    limits = [59, 24, 31, 12, 7]
                    res = match(CRON_REGEXP, command)
                    if res:
                        matches = res.groups()
                        for i in range(len(limits)):
                            # "*/n" drops its prefix and bare "*" becomes "1" so the
                            # numeric bound check below applies uniformly.
                            if int(matches[i].replace("*/", "").replace("*", "1")) > limits[i]:
                                raise ValueError
                workers[kind] = command
            except Exception:
                # Any parse problem (bad split, out-of-range cron field) skips the line.
                echo("Warning: misformatted Procfile entry '{}' at line {}".format(line, line_number), fg='yellow')
    if len(workers) == 0:
        return {}
    # WSGI trumps regular web workers
    if 'wsgi' in workers or 'jwsgi' in workers or 'rwsgi' in workers:
        if 'web' in workers:
            echo("Warning: found both 'wsgi' and 'web' workers, disabling 'web'", fg='yellow')
            del workers['web']
    return workers
2016-03-31 22:39:29 +00:00
2016-04-25 09:24:35 +00:00
def expandvars(buffer, env, default=None, skip_escaped=False):
    """Expand shell-style $VAR / ${VAR} references in *buffer* from the *env* mapping.

    Unknown variables are left verbatim unless *default* is given; with
    *skip_escaped* set, occurrences preceded by a backslash are left alone.
    """

    def _substitute(m):
        name = m.group(2) or m.group(1)
        fallback = m.group(0) if default is None else default
        return env.get(name, fallback)

    prefix = r'(?<!\\)' if skip_escaped else ''
    return sub(prefix + r'\$(\w+|\{([^}]*)\})', _substitute, buffer)
2016-04-25 09:24:35 +00:00
2016-04-25 12:23:00 +00:00
def command_output(cmd):
    """Run *cmd* through the shell and return its combined stdout/stderr as a
    string, or "" when the command fails for any reason."""
    try:
        raw = check_output(cmd, stderr=STDOUT, env=environ, shell=True)
        return str(raw)
    except Exception:
        return ""
2016-04-01 23:51:49 +00:00
def parse_settings(filename, env=None):
    """Parses a settings file and returns a dict with environment variables.

    Each non-comment line is KEY=VALUE; values may reference earlier
    variables with $VAR / ${VAR} syntax.  Returns {} when the file is
    missing or malformed.  *env* optionally seeds (and receives) the
    resulting mapping.
    """

    # Fix: the original declared `env={}` — a mutable default that silently
    # accumulated settings across calls sharing the same default dict.
    if env is None:
        env = {}
    if not exists(filename):
        return {}

    with open(filename, 'r') as settings:
        for line in settings:
            if line[0] == '#' or len(line.strip()) == 0:  # ignore comments and newlines
                continue
            try:
                k, v = map(lambda x: x.strip(), line.split("=", 1))
                # Expand against what has been parsed so far, so later lines
                # can reference earlier ones.
                env[k] = expandvars(v, env)
            except Exception:
                echo("Error: malformed setting '{}', ignoring file.".format(line), fg='red')
                return {}
    return env
2018-06-24 20:54:29 +00:00
def check_requirements(binaries):
    """Return True when every binary in *binaries* resolves on the PATH."""
    echo("-----> Checking requirements: {}".format(binaries), fg='green')
    resolved = [which(b) for b in binaries]
    echo(str(resolved))
    return None not in resolved
2019-11-13 17:57:04 +00:00
2019-11-20 08:28:02 +00:00
2019-11-13 17:57:04 +00:00
def found_app(kind):
    """Announce the detected runtime and return True (used inside the
    runtime-detection chains in do_deploy)."""
    message = "-----> {} app detected.".format(kind)
    echo(message, fg='green')
    return True
2016-03-27 12:12:13 +00:00
2019-11-20 08:28:02 +00:00
2019-08-05 08:07:47 +00:00
def do_deploy(app, deltas={}, newrev=None):
    """Deploy an app by resetting the work directory.

    Fetches/resets the app's git checkout (optionally to *newrev*), detects
    the runtime from marker files, delegates to the matching deploy_* helper,
    and runs optional 'preflight'/'release' Procfile hooks around it.
    """

    app_path = join(APP_ROOT, app)
    procfile = join(app_path, 'Procfile')
    log_path = join(LOG_ROOT, app)

    env = {'GIT_WORK_DIR': app_path}
    if exists(app_path):
        echo("-----> Deploying app '{}'".format(app), fg='green')
        # Sync the working copy with what was just pushed.
        call('git fetch --quiet', cwd=app_path, env=env, shell=True)
        if newrev:
            call('git reset --hard {}'.format(newrev), cwd=app_path, env=env, shell=True)
        call('git submodule init', cwd=app_path, env=env, shell=True)
        call('git submodule update', cwd=app_path, env=env, shell=True)
        if not exists(log_path):
            makedirs(log_path)
        workers = parse_procfile(procfile)
        if workers and len(workers) > 0:
            settings = {}
            # 'preflight' runs before any build; a non-zero exit aborts the deploy.
            if "preflight" in workers:
                echo("-----> Running preflight.", fg='green')
                retval = call(workers["preflight"], cwd=app_path, env=settings, shell=True)
                if retval:
                    echo("-----> Exiting due to preflight command error value: {}".format(retval))
                    exit(retval)
                workers.pop("preflight", None)
            # Runtime detection: first matching marker file (plus available
            # toolchain binaries) wins.
            if exists(join(app_path, 'requirements.txt')) and found_app("Python"):
                settings.update(deploy_python(app, deltas))
            elif exists(join(app_path, 'Gemfile')) and found_app("Ruby Application") and check_requirements(['ruby', 'gem', 'bundle']):
                settings.update(deploy_ruby(app, deltas))
            elif exists(join(app_path, 'package.json')) and found_app("Node") and (
                    check_requirements(['nodejs', 'npm']) or check_requirements(['node', 'npm']) or check_requirements(['nodeenv'])):
                settings.update(deploy_node(app, deltas))
            elif exists(join(app_path, 'pom.xml')) and found_app("Java Maven") and check_requirements(['java', 'mvn']):
                settings.update(deploy_java_maven(app, deltas))
            elif exists(join(app_path, 'build.gradle')) and found_app("Java Gradle") and check_requirements(['java', 'gradle']):
                settings.update(deploy_java_gradle(app, deltas))
            elif (exists(join(app_path, 'Godeps')) or len(glob(join(app_path, '*.go')))) and found_app("Go") and check_requirements(['go']):
                settings.update(deploy_go(app, deltas))
            elif exists(join(app_path, 'deps.edn')) and found_app("Clojure CLI") and check_requirements(['java', 'clojure']):
                settings.update(deploy_clojure_cli(app, deltas))
            elif exists(join(app_path, 'project.clj')) and found_app("Clojure Lein") and check_requirements(['java', 'lein']):
                settings.update(deploy_clojure_leiningen(app, deltas))
            elif 'release' in workers and 'web' in workers:
                echo("-----> Generic app detected.", fg='green')
                settings.update(deploy_identity(app, deltas))
            elif 'static' in workers:
                echo("-----> Static app detected.", fg='green')
                settings.update(deploy_identity(app, deltas))
            else:
                echo("-----> Could not detect runtime!", fg='red')
            # TODO: detect other runtimes
            # 'release' runs after the build with the deploy settings as env;
            # a non-zero exit aborts the deploy.
            if "release" in workers:
                echo("-----> Releasing", fg='green')
                retval = call(workers["release"], cwd=app_path, env=settings, shell=True)
                if retval:
                    echo("-----> Exiting due to release command error value: {}".format(retval))
                    exit(retval)
                workers.pop("release", None)
        else:
            echo("Error: Invalid Procfile for app '{}'.".format(app), fg='red')
    else:
        echo("Error: app '{}' not found.".format(app), fg='red')
2018-07-24 20:12:08 +00:00
2019-11-20 08:28:02 +00:00
2021-11-01 19:53:58 +00:00
def deploy_java_gradle(app, deltas={}):
    """Deploy a Java application using Gradle."""

    env_path = join(ENV_ROOT, app)
    app_path = join(APP_ROOT, app)
    build_path = join(app_path, 'build')
    env_file = join(app_path, 'ENV')

    build_env = {
        'VIRTUAL_ENV': env_path,
        "PATH": ':'.join([join(env_path, "bin"), join(app, ".bin"), environ['PATH']])
    }
    if exists(env_file):
        build_env.update(parse_settings(env_file, build_env))

    if not exists(env_path):
        makedirs(env_path)

    # A missing build/ directory means the app was never built on this host.
    if exists(build_path):
        echo("-----> Removing previous builds")
        echo("-----> Rebuilding Java Application")
        call('gradle clean build', cwd=app_path, env=build_env, shell=True)
    else:
        echo("-----> Building Java Application")
        call('gradle build', cwd=app_path, env=build_env, shell=True)

    return spawn_app(app, deltas)
2018-07-24 20:12:08 +00:00
2019-11-20 08:28:02 +00:00
2021-11-01 19:53:58 +00:00
def deploy_java_maven(app, deltas={}):
    """Deploy a Java application using Maven."""
    # TODO: Use jenv to isolate Java Application environments

    env_path = join(ENV_ROOT, app)
    app_path = join(APP_ROOT, app)
    target_path = join(app_path, 'target')
    env_file = join(app_path, 'ENV')

    build_env = {
        'VIRTUAL_ENV': env_path,
        "PATH": ':'.join([join(env_path, "bin"), join(app, ".bin"), environ['PATH']])
    }
    if exists(env_file):
        build_env.update(parse_settings(env_file, build_env))

    if not exists(env_path):
        makedirs(env_path)

    # A missing target/ directory means no prior build on this host.
    if exists(target_path):
        echo("-----> Removing previous builds")
        echo("-----> Rebuilding Java Application")
        call('mvn clean package', cwd=app_path, env=build_env, shell=True)
    else:
        echo("-----> Building Java Application")
        call('mvn package', cwd=app_path, env=build_env, shell=True)

    return spawn_app(app, deltas)
2019-11-20 08:28:02 +00:00
2023-07-20 13:28:48 +00:00
def deploy_clojure_cli(app, deltas={}):
    """Deploy a Clojure Application built with the Clojure CLI (deps.edn)."""

    virtual = join(ENV_ROOT, app)
    app_path = join(APP_ROOT, app)
    target_path = join(app_path, 'target')
    env_file = join(app_path, 'ENV')
    if not exists(target_path):
        makedirs(virtual)
    build_env = {
        'VIRTUAL_ENV': virtual,
        "PATH": ':'.join([join(virtual, "bin"), join(app, ".bin"), environ['PATH']]),
        "CLJ_CONFIG": environ.get('CLJ_CONFIG', join(environ['HOME'], '.clojure')),
    }
    if exists(env_file):
        build_env.update(parse_settings(env_file, build_env))
    echo("-----> Building Clojure Application")
    call('clojure -T:build release', cwd=app_path, env=build_env, shell=True)
    return spawn_app(app, deltas)
def deploy_clojure_leiningen(app, deltas={}):
    """Deploy a Clojure Application built with Leiningen (project.clj)."""

    virtual = join(ENV_ROOT, app)
    app_path = join(APP_ROOT, app)
    target_path = join(app_path, 'target')
    env_file = join(app_path, 'ENV')
    if not exists(target_path):
        makedirs(virtual)
    build_env = {
        'VIRTUAL_ENV': virtual,
        "PATH": ':'.join([join(virtual, "bin"), join(app, ".bin"), environ['PATH']]),
        "LEIN_HOME": environ.get('LEIN_HOME', join(environ['HOME'], '.lein')),
    }
    if exists(env_file):
        build_env.update(parse_settings(env_file, build_env))
    echo("-----> Building Clojure Application")
    call('lein clean', cwd=app_path, env=build_env, shell=True)
    call('lein uberjar', cwd=app_path, env=build_env, shell=True)
    return spawn_app(app, deltas)
2018-07-24 20:12:08 +00:00
2020-01-07 14:30:27 +00:00
def deploy_ruby(app, deltas={}):
    """Deploy a Ruby Application via bundler."""

    bundle_home = join(ENV_ROOT, app)
    app_path = join(APP_ROOT, app)
    env_file = join(app_path, 'ENV')
    env = {
        'VIRTUAL_ENV': bundle_home,
        "PATH": ':'.join([join(bundle_home, "bin"), join(app, ".bin"), environ['PATH']]),
    }
    if exists(env_file):
        env.update(parse_settings(env_file, env))

    if exists(bundle_home):
        echo("------> Rebuilding Ruby Application")
    else:
        # First deploy: create the gem home and point bundler at it.
        echo("-----> Building Ruby Application")
        makedirs(bundle_home)
        call('bundle config set --local path $VIRTUAL_ENV', cwd=app_path, env=env, shell=True)
    call('bundle install', cwd=app_path, env=env, shell=True)
    return spawn_app(app, deltas)
2018-07-24 20:12:08 +00:00
2016-04-06 22:56:49 +00:00
def deploy_go(app, deltas={}):
    """Deploy a Go application using godep."""

    go_path = join(ENV_ROOT, app)
    deps = join(APP_ROOT, app, 'Godeps')

    fresh_gopath = False
    if not exists(go_path):
        echo("-----> Creating GOPATH for '{}'".format(app), fg='green')
        makedirs(go_path)
        # copy across a pre-built GOPATH to save provisioning time
        call('cp -a $HOME/gopath {}'.format(app), cwd=ENV_ROOT, shell=True)
        fresh_gopath = True

    # Only re-run godep when Godeps changed since the GOPATH was last touched.
    if exists(deps) and (fresh_gopath or getmtime(deps) > getmtime(go_path)):
        echo("-----> Running godep for '{}'".format(app), fg='green')
        build_env = {
            'GOPATH': '$HOME/gopath',
            'GOROOT': '$HOME/go',
            'PATH': '$PATH:$HOME/go/bin',
            'GO15VENDOREXPERIMENT': '1'
        }
        call('godep update ...', cwd=join(APP_ROOT, app), env=build_env, shell=True)
    return spawn_app(app, deltas)
2016-04-06 22:56:49 +00:00
2018-06-24 20:54:29 +00:00
def deploy_node(app, deltas={}):
    """Deploy a Node application.

    Keeps node_modules inside the app's env dir (symlinked back into the
    checkout), optionally installs a pinned NODE_VERSION via nodeenv, then
    runs npm install when package.json changed.
    """

    virtualenv_path = join(ENV_ROOT, app)
    node_path = join(ENV_ROOT, app, "node_modules")
    node_modules_symlink = join(APP_ROOT, app, "node_modules")
    npm_prefix = abspath(join(node_path, ".."))
    env_file = join(APP_ROOT, app, 'ENV')
    deps = join(APP_ROOT, app, 'package.json')

    first_time = False
    if not exists(node_path):
        echo("-----> Creating node_modules for '{}'".format(app), fg='green')
        makedirs(node_path)
        first_time = True

    env = {
        'VIRTUAL_ENV': virtualenv_path,
        'NODE_PATH': node_path,
        'NPM_CONFIG_PREFIX': npm_prefix,
        "PATH": ':'.join([join(virtualenv_path, "bin"), join(node_path, ".bin"), environ['PATH']])
    }
    if exists(env_file):
        env.update(parse_settings(env_file, env))

    # include node binaries on our path
    environ["PATH"] = env["PATH"]

    version = env.get("NODE_VERSION")
    node_binary = join(virtualenv_path, "bin", "node")
    # Query the currently installed node version ("" when none is installed).
    installed = check_output("{} -v".format(node_binary), cwd=join(APP_ROOT, app), env=env, shell=True).decode("utf8").rstrip(
        "\n") if exists(node_binary) else ""

    if version and check_requirements(['nodeenv']):
        if not installed.endswith(version):
            # Refuse to swap the node runtime out from under a running app.
            started = glob(join(UWSGI_ENABLED, '{}*.ini'.format(app)))
            if installed and len(started):
                echo("Warning: Can't update node with app running. Stop the app & retry.", fg='yellow')
            else:
                echo("-----> Installing node version '{NODE_VERSION:s}' using nodeenv".format(**env), fg='green')
                call("nodeenv --prebuilt --node={NODE_VERSION:s} --clean-src --force {VIRTUAL_ENV:s}".format(**env),
                     cwd=virtualenv_path, env=env, shell=True)
        else:
            echo("-----> Node is installed at {}.".format(version))

    if exists(deps) and check_requirements(['npm']):
        # Reinstall only on first deploy or when package.json is newer than node_modules.
        if first_time or getmtime(deps) > getmtime(node_path):
            copyfile(join(APP_ROOT, app, 'package.json'), join(ENV_ROOT, app, 'package.json'))
            if not exists(node_modules_symlink):
                symlink(node_path, node_modules_symlink)
            echo("-----> Running npm for '{}'".format(app), fg='green')
            call('npm install --prefix {} --package-lock=false'.format(npm_prefix), cwd=join(APP_ROOT, app), env=env, shell=True)
    return spawn_app(app, deltas)
2018-06-24 20:54:29 +00:00
2016-04-06 10:59:42 +00:00
def deploy_python(app, deltas={}):
    """Deploy a Python application.

    Creates (on first run) a virtualenv under ENV_ROOT/<app>, activates it
    in-process via activate_this.py, installs requirements.txt when it is
    newer than the virtualenv, then hands off to spawn_app.

    :param app: app name (directory under APP_ROOT / ENV_ROOT)
    :param deltas: per-worker-kind scaling deltas, passed through to spawn_app
    :returns: the environment dict returned by spawn_app
    """

    virtualenv_path = join(ENV_ROOT, app)
    requirements = join(APP_ROOT, app, 'requirements.txt')
    env_file = join(APP_ROOT, app, 'ENV')
    # Set unbuffered output and readable UTF-8 mapping
    env = {
        'PYTHONUNBUFFERED': '1',
        'PYTHONIOENCODING': 'UTF_8:replace'
    }
    if exists(env_file):
        env.update(parse_settings(env_file, env))

    # TODO: improve version parsing
    # pylint: disable=unused-variable
    version = int(env.get("PYTHON_VERSION", "3"))

    first_time = False
    if not exists(join(virtualenv_path, "bin", "activate")):
        echo("-----> Creating virtualenv for '{}'".format(app), fg='green')
        try:
            makedirs(virtualenv_path)
        except FileExistsError:
            echo("-----> Env dir already exists: '{}'".format(app), fg='yellow')
        call('virtualenv --python=python{version:d} {app:s}'.format(**locals()), cwd=ENV_ROOT, shell=True)
        first_time = True

    activation_script = join(virtualenv_path, 'bin', 'activate_this.py')
    # fix: use a context manager so the script's file handle is closed
    # (the previous exec(open(...).read(), ...) leaked it)
    with open(activation_script) as h:
        exec(h.read(), dict(__file__=activation_script))

    # re-run pip when the virtualenv is fresh or requirements.txt changed
    if first_time or getmtime(requirements) > getmtime(virtualenv_path):
        echo("-----> Running pip for '{}'".format(app), fg='green')
        call('pip install -r {}'.format(requirements), cwd=virtualenv_path, shell=True)

    return spawn_app(app, deltas)
2016-03-31 22:26:52 +00:00
2019-08-31 08:38:14 +00:00
2019-08-30 10:07:55 +00:00
def deploy_identity(app, deltas={}):
    """Deploy an app with no language runtime: just ensure its env dir exists and spawn workers."""
    target_dir = join(ENV_ROOT, app)
    if not exists(target_dir):
        makedirs(target_dir)
    return spawn_app(app, deltas)
2019-08-31 08:38:14 +00:00
2016-04-02 21:32:10 +00:00
def spawn_app(app, deltas={}):
    """Create all workers for an app.

    Reads the app's Procfile and SCALING file, builds the app environment
    (repo ENV plus custom overrides), renders the nginx configuration
    (TLS via acme.sh or a self-signed fallback, caching, static mappings,
    ACLs) when NGINX_SERVER_NAME is set, persists LIVE_ENV/SCALING, and
    creates or removes per-worker uwsgi ini files under UWSGI_ENABLED.

    :param app: app name (directory under APP_ROOT / ENV_ROOT)
    :param deltas: per-worker-kind scaling deltas (read-only here)
    :returns: the fully resolved environment dict for the app
    """

    # pylint: disable=unused-variable
    app_path = join(APP_ROOT, app)
    procfile = join(app_path, 'Procfile')
    workers = parse_procfile(procfile)
    # 'preflight' and 'release' are one-shot hooks, not long-running workers
    workers.pop("preflight", None)
    workers.pop("release", None)
    ordinals = defaultdict(lambda: 1)
    worker_count = {k: 1 for k in workers.keys()}

    # the Python virtualenv
    virtualenv_path = join(ENV_ROOT, app)
    # Settings shipped with the app
    env_file = join(APP_ROOT, app, 'ENV')
    # Custom overrides
    settings = join(ENV_ROOT, app, 'ENV')
    # Live settings
    live = join(ENV_ROOT, app, 'LIVE_ENV')
    # Scaling
    scaling = join(ENV_ROOT, app, 'SCALING')

    # Bootstrap environment
    env = {
        'APP': app,
        'LOG_ROOT': LOG_ROOT,
        'HOME': environ['HOME'],
        'USER': environ['USER'],
        'PATH': ':'.join([join(virtualenv_path, 'bin'), environ['PATH']]),
        'PWD': dirname(env_file),
        'VIRTUAL_ENV': virtualenv_path,
    }

    safe_defaults = {
        'NGINX_IPV4_ADDRESS': '0.0.0.0',
        'NGINX_IPV6_ADDRESS': '[::]',
        'BIND_ADDRESS': '127.0.0.1',
    }

    # add node path if present
    node_path = join(virtualenv_path, "node_modules")
    if exists(node_path):
        env["NODE_PATH"] = node_path
        env["PATH"] = ':'.join([join(node_path, ".bin"), env['PATH']])

    # Load environment variables shipped with repo (if any)
    if exists(env_file):
        env.update(parse_settings(env_file, env))

    # Override with custom settings (if any)
    if exists(settings):
        env.update(parse_settings(settings, env))  # lgtm [py/modification-of-default-value]

    if 'web' in workers or 'wsgi' in workers or 'jwsgi' in workers or 'static' in workers or 'rwsgi' in workers:
        # Pick a port if none defined
        if 'PORT' not in env:
            env['PORT'] = str(get_free_port())
            echo("-----> picking free port {PORT}".format(**env))

        if get_boolean(env.get('DISABLE_IPV6', 'false')):
            safe_defaults.pop('NGINX_IPV6_ADDRESS', None)
            echo("-----> nginx will NOT use IPv6".format(**locals()))

        # Safe defaults for addressing
        for k, v in safe_defaults.items():
            if k not in env:
                echo("-----> nginx {k:s} will be set to {v}".format(**locals()))
                env[k] = v

        # Set up nginx if we have NGINX_SERVER_NAME set
        if 'NGINX_SERVER_NAME' in env:
            # Hack to get around ClickCommand
            env['NGINX_SERVER_NAME'] = env['NGINX_SERVER_NAME'].split(',')
            env['NGINX_SERVER_NAME'] = ' '.join(env['NGINX_SERVER_NAME'])

            nginx = command_output("nginx -V")
            nginx_ssl = "443 ssl"
            if "--with-http_v2_module" in nginx:
                nginx_ssl += " http2"
            elif "--with-http_spdy_module" in nginx and "nginx/1.6.2" not in nginx:  # avoid Raspbian bug
                nginx_ssl += " spdy"

            nginx_conf = join(NGINX_ROOT, "{}.conf".format(app))

            env.update({  # lgtm [py/modification-of-default-value]
                'NGINX_SSL': nginx_ssl,
                'NGINX_ROOT': NGINX_ROOT,
                'ACME_WWW': ACME_WWW,
            })

            # default to reverse proxying to the TCP port we picked
            env['PIKU_INTERNAL_NGINX_UWSGI_SETTINGS'] = 'proxy_pass http://{BIND_ADDRESS:s}:{PORT:s};'.format(**env)
            if 'wsgi' in workers or 'jwsgi' in workers:
                # talk to uwsgi over a unix socket instead of TCP
                sock = join(NGINX_ROOT, "{}.sock".format(app))
                env['PIKU_INTERNAL_NGINX_UWSGI_SETTINGS'] = expandvars(PIKU_INTERNAL_NGINX_UWSGI_SETTINGS, env)
                env['NGINX_SOCKET'] = env['BIND_ADDRESS'] = "unix://" + sock
                if 'PORT' in env:
                    del env['PORT']
            else:
                env['NGINX_SOCKET'] = "{BIND_ADDRESS:s}:{PORT:s}".format(**env)
                echo("-----> nginx will look for app '{}' on {}".format(app, env['NGINX_SOCKET']))

            domains = env['NGINX_SERVER_NAME'].split()
            domain = domains[0]
            # marker file recording which domain set a certificate was issued for
            issuefile = join(ACME_ROOT, domain, "issued-" + "-".join(domains))
            key, crt = [join(NGINX_ROOT, "{}.{}".format(app, x)) for x in ['key', 'crt']]
            if exists(join(ACME_ROOT, "acme.sh")):
                acme = ACME_ROOT
                www = ACME_WWW
                root_ca = ACME_ROOT_CA
                # if this is the first run there will be no nginx conf yet
                # create a basic conf stub just to serve the acme auth
                if not exists(nginx_conf):
                    echo("-----> writing temporary nginx conf")
                    buffer = expandvars(NGINX_ACME_FIRSTRUN_TEMPLATE, env)
                    with open(nginx_conf, "w") as h:
                        h.write(buffer)
                if not exists(key) or not exists(issuefile):
                    echo("-----> getting letsencrypt certificate")
                    certlist = " ".join(["-d {}".format(d) for d in domains])
                    call('{acme:s}/acme.sh --issue {certlist:s} -w {www:s} --server {root_ca:s}'.format(**locals()), shell=True)
                    call('{acme:s}/acme.sh --install-cert {certlist:s} --key-file {key:s} --fullchain-file {crt:s}'.format(
                        **locals()), shell=True)
                    if exists(join(ACME_ROOT, domain)) and not exists(join(ACME_WWW, app)):
                        symlink(join(ACME_ROOT, domain), join(ACME_WWW, app))
                    try:
                        symlink("/dev/null", issuefile)
                    except Exception:
                        pass
                else:
                    echo("-----> letsencrypt certificate already installed")

            # fall back to creating self-signed certificate if acme failed
            if not exists(key) or stat(crt).st_size == 0:
                echo("-----> generating self-signed certificate")
                call(
                    'openssl req -new -newkey rsa:4096 -days 365 -nodes -x509 -subj "/C=US/ST=NY/L=New York/O=Piku/OU=Self-Signed/CN={domain:s}" -keyout {key:s} -out {crt:s}'.format(
                        **locals()), shell=True)

            # restrict access to server from CloudFlare IP addresses
            acl = []
            if get_boolean(env.get('NGINX_CLOUDFLARE_ACL', 'false')):
                try:
                    cf = loads(urlopen('https://api.cloudflare.com/client/v4/ips').read().decode("utf-8"))
                    if cf['success'] is True:
                        for i in cf['result']['ipv4_cidrs']:
                            acl.append("allow {};".format(i))
                        # fix: IPv6 ranges belong in the ACL only when IPv6 is
                        # NOT disabled (condition was previously inverted)
                        if not get_boolean(env.get('DISABLE_IPV6', 'false')):
                            for i in cf['result']['ipv6_cidrs']:
                                acl.append("allow {};".format(i))
                        # allow access from controlling machine
                        if 'SSH_CLIENT' in environ:
                            remote_ip = environ['SSH_CLIENT'].split()[0]
                            echo("-----> nginx ACL will include your IP ({})".format(remote_ip))
                            acl.append("allow {};".format(remote_ip))
                        acl.extend(["allow 127.0.0.1;", "deny all;"])
                except Exception:
                    cf = defaultdict()
                    echo("-----> Could not retrieve CloudFlare IP ranges: {}".format(format_exc()), fg="red")

            env['NGINX_ACL'] = " ".join(acl)

            env['PIKU_INTERNAL_NGINX_BLOCK_GIT'] = "" if env.get('NGINX_ALLOW_GIT_FOLDERS') else "location ~ /\\.git { deny all; }"

            env['PIKU_INTERNAL_PROXY_CACHE_PATH'] = ''
            env['PIKU_INTERNAL_NGINX_CACHE_MAPPINGS'] = ''
            # Get a mapping of /prefix1,/prefix2
            default_cache_path = join(CACHE_ROOT, app)
            if not exists(default_cache_path):
                makedirs(default_cache_path)
            try:
                cache_size = int(env.get('NGINX_CACHE_SIZE', '1'))
            except Exception:
                echo("=====> Invalid cache size, defaulting to 1GB")
                cache_size = 1
            cache_size = str(cache_size) + "g"
            try:
                cache_time_control = int(env.get('NGINX_CACHE_CONTROL', '3600'))
            except Exception:
                echo("=====> Invalid time for cache control, defaulting to 3600s")
                cache_time_control = 3600
            cache_time_control = str(cache_time_control)
            try:
                cache_time_content = int(env.get('NGINX_CACHE_TIME', '3600'))
            except Exception:
                echo("=====> Invalid cache time for content, defaulting to 3600s")
                cache_time_content = 3600
            cache_time_content = str(cache_time_content) + "s"
            try:
                cache_time_redirects = int(env.get('NGINX_CACHE_REDIRECTS', '3600'))
            except Exception:
                echo("=====> Invalid cache time for redirects, defaulting to 3600s")
                cache_time_redirects = 3600
            cache_time_redirects = str(cache_time_redirects) + "s"
            try:
                cache_time_any = int(env.get('NGINX_CACHE_ANY', '3600'))
            except Exception:
                echo("=====> Invalid cache expiry fallback, defaulting to 3600s")
                cache_time_any = 3600
            cache_time_any = str(cache_time_any) + "s"
            try:
                cache_time_expiry = int(env.get('NGINX_CACHE_EXPIRY', '86400'))
            except Exception:
                echo("=====> Invalid cache expiry, defaulting to 86400s")
                cache_time_expiry = 86400
            cache_time_expiry = str(cache_time_expiry) + "s"
            cache_prefixes = env.get('NGINX_CACHE_PREFIXES', '')
            cache_path = env.get('NGINX_CACHE_PATH', default_cache_path)
            if not exists(cache_path):
                echo("=====> Cache path {} does not exist, using default {}, be aware of disk usage.".format(cache_path, default_cache_path))
                # fix: fall back to the default path directly
                # (env.get(default_cache_path) looked up a filesystem path as
                # an env key and always produced None)
                cache_path = default_cache_path
            if len(cache_prefixes):
                prefixes = []  # this will turn into part of /(path1|path2|path3)
                try:
                    items = cache_prefixes.split(',')
                    for item in items:
                        if item[0] == '/':
                            prefixes.append(item[1:])
                        else:
                            prefixes.append(item)
                    cache_prefixes = "|".join(prefixes)
                    echo("-----> nginx will cache /({}) prefixes up to {} or {} of disk space, with the following timings:".format(cache_prefixes, cache_time_expiry, cache_size))
                    echo("-----> nginx will cache content for {}.".format(cache_time_content))
                    echo("-----> nginx will cache redirects for {}.".format(cache_time_redirects))
                    echo("-----> nginx will cache everything else for {}.".format(cache_time_any))
                    echo("-----> nginx will send caching headers asking for {} seconds of public caching.".format(cache_time_control))
                    env['PIKU_INTERNAL_PROXY_CACHE_PATH'] = expandvars(
                        PIKU_INTERNAL_PROXY_CACHE_PATH, locals())
                    env['PIKU_INTERNAL_NGINX_CACHE_MAPPINGS'] = expandvars(
                        PIKU_INTERNAL_NGINX_CACHE_MAPPING, locals())
                    env['PIKU_INTERNAL_NGINX_CACHE_MAPPINGS'] = expandvars(
                        env['PIKU_INTERNAL_NGINX_CACHE_MAPPINGS'], env)
                except Exception as e:
                    echo("Error {} in cache path spec: should be /prefix1:[,/prefix2], ignoring.".format(e))
                    env['PIKU_INTERNAL_NGINX_CACHE_MAPPINGS'] = ''

            env['PIKU_INTERNAL_NGINX_STATIC_MAPPINGS'] = ''

            # Get a mapping of /prefix1:path1,/prefix2:path2
            static_paths = env.get('NGINX_STATIC_PATHS', '')
            # prepend static worker path if present
            if 'static' in workers:
                stripped = workers['static'].strip("/").rstrip("/")
                static_paths = ("/" if stripped[0:1] == ":" else "/:") + (stripped if stripped else ".") + "/" + ("," if static_paths else "") + static_paths
            if len(static_paths):
                try:
                    items = static_paths.split(',')
                    for item in items:
                        static_url, static_path = item.split(':')
                        # relative paths are resolved against the app checkout
                        if static_path[0] != '/':
                            static_path = join(app_path, static_path).rstrip("/") + "/"
                        echo("-----> nginx will map {} to {}.".format(static_url, static_path))
                        env['PIKU_INTERNAL_NGINX_STATIC_MAPPINGS'] = env['PIKU_INTERNAL_NGINX_STATIC_MAPPINGS'] + expandvars(
                            PIKU_INTERNAL_NGINX_STATIC_MAPPING, locals())
                except Exception as e:
                    echo("Error {} in static path spec: should be /prefix1:path1[,/prefix2:path2], ignoring.".format(e))
                    env['PIKU_INTERNAL_NGINX_STATIC_MAPPINGS'] = ''

            env['PIKU_INTERNAL_NGINX_CUSTOM_CLAUSES'] = expandvars(open(join(app_path, env["NGINX_INCLUDE_FILE"])).read(), env) if env.get("NGINX_INCLUDE_FILE") else ""
            env['PIKU_INTERNAL_NGINX_PORTMAP'] = ""
            if 'web' in workers or 'wsgi' in workers or 'jwsgi' in workers or 'rwsgi' in workers:
                env['PIKU_INTERNAL_NGINX_PORTMAP'] = expandvars(NGINX_PORTMAP_FRAGMENT, env)
            env['PIKU_INTERNAL_NGINX_COMMON'] = expandvars(NGINX_COMMON_FRAGMENT, env)

            echo("-----> nginx will map app '{}' to hostname(s) '{}'".format(app, env['NGINX_SERVER_NAME']))
            if get_boolean(env.get('NGINX_HTTPS_ONLY', 'false')):
                buffer = expandvars(NGINX_HTTPS_ONLY_TEMPLATE, env)
                echo("-----> nginx will redirect all requests to hostname(s) '{}' to HTTPS".format(env['NGINX_SERVER_NAME']))
            else:
                buffer = expandvars(NGINX_TEMPLATE, env)

            # remove all references to IPv6 listeners (for enviroments where it's disabled)
            if get_boolean(env.get('DISABLE_IPV6', 'false')):
                buffer = '\n'.join([line for line in buffer.split('\n') if 'NGINX_IPV6' not in line])
            # change any unecessary uWSGI specific directives to standard proxy ones
            if 'wsgi' not in workers and 'jwsgi' not in workers:
                buffer = buffer.replace("uwsgi_", "proxy_")

            # map Cloudflare connecting IP to REMOTE_ADDR
            if get_boolean(env.get('NGINX_CLOUDFLARE_ACL', 'false')):
                buffer = buffer.replace("REMOTE_ADDR $remote_addr", "REMOTE_ADDR $http_cf_connecting_ip")

            with open(nginx_conf, "w") as h:
                h.write(buffer)
            # prevent broken config from breaking other deploys
            try:
                nginx_config_test = str(check_output("nginx -t 2>&1 | grep {}".format(app), env=environ, shell=True))
            except Exception:
                nginx_config_test = None
            if nginx_config_test:
                echo("Error: [nginx config] {}".format(nginx_config_test), fg='red')
                echo("Warning: removing broken nginx config.", fg='yellow')
                unlink(nginx_conf)

    # Configured worker count
    if exists(scaling):
        worker_count.update({k: int(v) for k, v in parse_procfile(scaling).items() if k in workers})

    to_create = {}
    to_destroy = {}
    for k, v in worker_count.items():
        to_create[k] = range(1, worker_count[k] + 1)
        if k in deltas and deltas[k]:
            to_create[k] = range(1, worker_count[k] + deltas[k] + 1)
            if deltas[k] < 0:
                to_destroy[k] = range(worker_count[k], worker_count[k] + deltas[k], -1)
            worker_count[k] = worker_count[k] + deltas[k]

    # Cleanup env: PIKU_INTERNAL_* keys are template-only, never exported
    for k, v in list(env.items()):
        if k.startswith('PIKU_INTERNAL_'):
            del env[k]

    # Save current settings
    write_config(live, env)
    write_config(scaling, worker_count, ':')

    if get_boolean(env.get('PIKU_AUTO_RESTART', 'true')):
        config = glob(join(UWSGI_ENABLED, '{}*.ini'.format(app)))
        if len(config):
            echo("-----> Removing uwsgi configs to trigger auto-restart.")
            for c in config:
                remove(c)

    # Create new workers
    for k, v in to_create.items():
        for w in v:
            enabled = join(UWSGI_ENABLED, '{app:s}_{k:s}.{w:d}.ini'.format(**locals()))
            if not exists(enabled):
                echo("-----> spawning '{app:s}:{k:s}.{w:d}'".format(**locals()), fg='green')
                spawn_worker(app, k, workers[k], env, w)

    # Remove unnecessary workers (leave logfiles)
    for k, v in to_destroy.items():
        for w in v:  # lgtm [py/unused-loop-variable]
            enabled = join(UWSGI_ENABLED, '{app:s}_{k:s}.{w:d}.ini'.format(**locals()))
            if exists(enabled):
                echo("-----> terminating '{app:s}:{k:s}.{w:d}'".format(**locals()), fg='yellow')
                unlink(enabled)

    return env
2019-11-19 08:09:08 +00:00
2016-03-31 22:26:52 +00:00
2016-04-02 21:32:10 +00:00
def spawn_worker(app, kind, command, env, ordinal=1):
    """Set up and deploy a single worker of a given kind

    Builds a uwsgi ini file for one worker instance in UWSGI_AVAILABLE and
    copies it to UWSGI_ENABLED; 'static' workers get no ini file at all
    (nginx serves them directly).

    :param app: app name
    :param kind: worker kind from the Procfile ('wsgi', 'web', 'jwsgi',
                 'rwsgi', 'cron', 'static', or anything else as a daemon)
    :param command: the Procfile command (module spec for *wsgi kinds)
    :param env: resolved environment; mutated here (PROC_TYPE is added,
                NGINX_ACL is removed)
    :param ordinal: 1-based index of this worker instance
    """

    # pylint: disable=unused-variable
    env['PROC_TYPE'] = kind
    env_path = join(ENV_ROOT, app)
    available = join(UWSGI_AVAILABLE, '{app:s}_{kind:s}.{ordinal:d}.ini'.format(**locals()))
    enabled = join(UWSGI_ENABLED, '{app:s}_{kind:s}.{ordinal:d}.ini'.format(**locals()))
    log_file = join(LOG_ROOT, app, kind)

    # baseline uwsgi settings shared by every worker kind;
    # runs as the current user/group, logs rotated per ordinal
    settings = [
        ('chdir', join(APP_ROOT, app)),
        ('uid', getpwuid(getuid()).pw_name),
        ('gid', getgrgid(getgid()).gr_name),
        ('master', 'true'),
        ('project', app),
        ('max-requests', env.get('UWSGI_MAX_REQUESTS', '1024')),
        ('listen', env.get('UWSGI_LISTEN', '16')),
        ('processes', env.get('UWSGI_PROCESSES', '1')),
        ('procname-prefix', '{app:s}:{kind:s}:'.format(**locals())),
        ('enable-threads', env.get('UWSGI_ENABLE_THREADS', 'true').lower()),
        ('log-x-forwarded-for', env.get('UWSGI_LOG_X_FORWARDED_FOR', 'false').lower()),
        ('log-maxsize', env.get('UWSGI_LOG_MAXSIZE', UWSGI_LOG_MAXSIZE)),
        ('logfile-chown', '%s:%s' % (getpwuid(getuid()).pw_name, getgrgid(getgid()).gr_name)),
        ('logfile-chmod', '640'),
        ('logto2', '{log_file:s}.{ordinal:d}.log'.format(**locals())),
        ('log-backupname', '{log_file:s}.{ordinal:d}.log.old'.format(**locals())),
    ]

    # only add virtualenv to uwsgi if it's a real virtualenv
    if exists(join(env_path, "bin", "activate_this.py")):
        settings.append(('virtualenv', env_path))

    # optional on-demand workers: spawn on request, die after idle_timeout
    if 'UWSGI_IDLE' in env:
        try:
            idle_timeout = int(env['UWSGI_IDLE'])
            settings.extend([
                ('idle', str(idle_timeout)),
                ('cheap', 'True'),
                ('die-on-idle', 'True')
            ])
            echo("-----> uwsgi will start workers on demand and kill them after {}s of inactivity".format(idle_timeout), fg='yellow')
        except Exception:
            echo("Error: malformed setting 'UWSGI_IDLE', ignoring it.".format(), fg='red')
            pass

    if kind == 'cron':
        # translate Procfile cron syntax to uwsgi's ('*' -> -1, '*/n' -> -n)
        settings.extend([
            ['cron', command.replace("*/", "-").replace("*", "-1")],
        ])

    if kind == 'jwsgi':
        settings.extend([
            ('module', command),
            ('threads', env.get('UWSGI_THREADS', '4')),
            ('plugin', 'jvm'),
            ('plugin', 'jwsgi')
        ])
    # could not come up with a better kind for ruby, web would work but that means loading the rack plugin in web.
    if kind == 'rwsgi':
        settings.extend([
            ('module', command),
            ('threads', env.get('UWSGI_THREADS', '4')),
            ('plugin', 'rack'),
            ('plugin', 'rbrequire'),
            ('plugin', 'post-buffering')
        ])

    python_version = int(env.get('PYTHON_VERSION', '3'))

    if kind == 'wsgi':
        settings.extend([
            ('module', command),
            ('threads', env.get('UWSGI_THREADS', '4')),
        ])

        # pick the uwsgi Python plugin (and optional gevent/asyncio engine)
        # matching the app's declared interpreter version
        if python_version == 2:
            settings.extend([
                ('plugin', 'python'),
            ])
            if 'UWSGI_GEVENT' in env:
                settings.extend([
                    ('plugin', 'gevent_python'),
                    ('gevent', env['UWSGI_GEVENT']),
                ])
            elif 'UWSGI_ASYNCIO' in env:
                try:
                    tasks = int(env['UWSGI_ASYNCIO'])
                    settings.extend([
                        ('plugin', 'asyncio_python'),
                        ('async', tasks),
                    ])
                    echo("-----> uwsgi will support {} async tasks".format(tasks), fg='yellow')
                except ValueError:
                    echo("Error: malformed setting 'UWSGI_ASYNCIO', ignoring it.".format(), fg='red')
        elif python_version == 3:
            settings.extend([
                ('plugin', 'python3'),
            ])
            if 'UWSGI_ASYNCIO' in env:
                try:
                    tasks = int(env['UWSGI_ASYNCIO'])
                    settings.extend([
                        ('plugin', 'asyncio_python3'),
                        ('async', tasks),
                    ])
                    echo("-----> uwsgi will support {} async tasks".format(tasks), fg='yellow')
                except ValueError:
                    echo("Error: malformed setting 'UWSGI_ASYNCIO', ignoring it.".format(), fg='red')

        # If running under nginx, don't expose a port at all
        if 'NGINX_SERVER_NAME' in env:
            sock = join(NGINX_ROOT, "{}.sock".format(app))
            echo("-----> nginx will talk to uWSGI via {}".format(sock), fg='yellow')
            settings.extend([
                ('socket', sock),
                ('chmod-socket', '664'),
            ])
        else:
            echo("-----> nginx will talk to uWSGI via {BIND_ADDRESS:s}:{PORT:s}".format(**env), fg='yellow')
            settings.extend([
                ('http', '{BIND_ADDRESS:s}:{PORT:s}'.format(**env)),
                ('http-use-socket', '{BIND_ADDRESS:s}:{PORT:s}'.format(**env)),
                ('http-socket', '{BIND_ADDRESS:s}:{PORT:s}'.format(**env)),
            ])
    elif kind == 'web':
        # 'web' processes run their own server; uwsgi just supervises them
        echo("-----> nginx will talk to the 'web' process via {BIND_ADDRESS:s}:{PORT:s}".format(**env), fg='yellow')
        settings.append(('attach-daemon', command))
    elif kind == 'static':
        echo("-----> nginx serving static files only".format(**env), fg='yellow')
    elif kind == 'cron':
        echo("-----> uwsgi scheduled cron for {command}".format(**locals()), fg='yellow')
    else:
        # any other kind is supervised as a plain daemon
        settings.append(('attach-daemon', command))

    # NOTE: the doubled %% below is presumably for uwsgi's ini parser
    # (so uwsgi sees single-% log-format placeholders) — confirm against
    # the uwsgi configuration docs
    if kind in ['wsgi', 'web']:
        settings.append(('log-format',
                         '%%(addr) - %%(user) [%%(ltime)] "%%(method) %%(uri) %%(proto)" %%(status) %%(size) "%%(referer)" "%%(uagent)" %%(msecs)ms'))

    # remove unnecessary variables from the env in nginx.ini
    for k in ['NGINX_ACL']:
        if k in env:
            del env[k]

    # insert user defined uwsgi settings if set
    settings += parse_settings(join(APP_ROOT, app, env.get("UWSGI_INCLUDE_FILE"))).items() if env.get("UWSGI_INCLUDE_FILE") else []

    # export the whole app environment to the worker process
    for k, v in env.items():
        settings.append(('env', '{k:s}={v}'.format(**locals())))

    # write the ini to 'available' and enable it by copying; static workers
    # need no uwsgi process at all
    if kind != 'static':
        with open(available, 'w') as h:
            h.write('[uwsgi]\n')
            for k, v in settings:
                h.write("{k:s} = {v}\n".format(**locals()))
        copyfile(available, enabled)
2016-04-01 22:26:37 +00:00
2023-04-06 16:50:52 +00:00
2023-04-06 16:44:52 +00:00
def do_stop(app):
    """Stop a deployed app by removing its enabled uwsgi ini files."""
    ini_files = glob(join(UWSGI_ENABLED, '{}*.ini'.format(app)))
    if not ini_files:
        # TODO app could be already stopped. Need to able to tell the difference.
        echo("Error: app '{}' not deployed!".format(app), fg='red')
    else:
        echo("Stopping app '{}'...".format(app), fg='yellow')
        for ini in ini_files:
            remove(ini)
2023-04-06 16:44:52 +00:00
def do_restart(app):
    """Restarts a deployed app"""
    echo("restarting app '{}'...".format(app), fg='yellow')
    # This must work even if the app is stopped when called. At the end, the
    # app should be running: a stopped app simply makes do_stop a no-op.
    do_stop(app)
    spawn_app(app)
2019-06-20 14:02:11 +00:00
2016-04-01 22:26:37 +00:00
2016-04-03 10:35:39 +00:00
def multi_tail(app, filenames, catch_up=20):
    """Tails multiple log files.

    Yields up to `catch_up` historical lines per file first, then polls the
    files forever, yielding new lines prefixed with the file's base name and
    following inode-based log rotation. `app` is unused but kept for
    interface compatibility with callers.
    """
    # Seek helper: return one complete line, or rewind and return None
    def peek(handle):
        where = handle.tell()
        line = handle.readline()
        if not line:
            handle.seek(where)
            return None
        return line

    inodes = {}
    files = {}
    prefixes = {}

    # Set up current state for each log file
    for f in filenames:
        prefixes[f] = splitext(basename(f))[0]
        files[f] = open(f, "rt", encoding="utf-8", errors="ignore")
        inodes[f] = stat(f).st_ino
        files[f].seek(0, 2)

    # default=0 keeps an empty `filenames` list from raising ValueError
    longest = max(map(len, prefixes.values()), default=0)

    # Grab a little history (if any)
    for f in filenames:
        for line in deque(open(f, "rt", encoding="utf-8", errors="ignore"), catch_up):
            yield "{} | {}".format(prefixes[f].ljust(longest), line)

    while True:
        updated = False
        # Check for updates on every file
        for f in filenames:
            line = peek(files[f])
            if line:
                updated = True
                yield "{} | {}".format(prefixes[f].ljust(longest), line)

        if not updated:
            sleep(1)
            # Check if logs rotated; iterate over a copy because we may
            # remove entries from `filenames` while scanning (the original
            # mutated the list it was iterating, skipping entries)
            for f in list(filenames):
                if exists(f):
                    if stat(f).st_ino != inodes[f]:
                        # Reopen with the same text mode/encoding as the
                        # initial open (the original reopened with bare
                        # open(), losing the utf-8/ignore-errors settings)
                        files[f] = open(f, "rt", encoding="utf-8", errors="ignore")
                        inodes[f] = stat(f).st_ino
                else:
                    filenames.remove(f)
2016-04-01 22:26:37 +00:00
2016-03-27 12:12:13 +00:00
2019-11-19 08:09:08 +00:00
# === CLI commands ===

# Shared click settings: make -h work as a help alias for every command.
CONTEXT_SETTINGS = {"help_option_names": ["-h", "--help"]}
2019-11-20 08:28:02 +00:00
2019-11-18 21:33:01 +00:00
@group(context_settings=CONTEXT_SETTINGS)
def piku():
    """The smallest PaaS you've ever seen"""
    # NOTE: the docstring above doubles as the CLI help banner rendered by click.
    pass
2016-03-26 22:47:39 +00:00
2023-01-07 18:05:02 +00:00
# Compatibility shim: click 8.x renamed `resultcallback` to `result_callback`,
# so probe for whichever attribute this click version provides.
piku.rc = getattr(piku, "result_callback", None) or getattr(piku, "resultcallback", None)


@piku.rc()
def cleanup(ctx):
    """Callback from command execution -- add debugging to taste"""
    pass
2016-03-26 12:52:54 +00:00
2016-03-29 20:18:38 +00:00
# --- User commands ---
2016-03-26 12:52:54 +00:00
2023-01-07 18:05:02 +00:00
2016-04-06 21:46:32 +00:00
@piku.command("apps")
def cmd_apps():
    """List apps, e.g.: piku apps"""
    deployed = listdir(APP_ROOT)
    if not deployed:
        echo("There are no applications deployed.")
        return

    for name in deployed:
        # An app counts as running when at least one uwsgi ini is enabled for it
        is_running = glob(join(UWSGI_ENABLED, '{}*.ini'.format(name))) != []
        echo(('*' if is_running else ' ') + name, fg='green')
2016-04-06 21:46:32 +00:00
2016-04-02 15:46:03 +00:00
@piku.command("config")
@argument('app')
def cmd_config(app):
    """Show config, e.g.: piku config <app>"""
    app = exit_if_invalid(app)
    env_path = join(ENV_ROOT, app, 'ENV')
    if not exists(env_path):
        echo("Warning: app '{}' not deployed, no config found.".format(app), fg='yellow')
        return
    with open(env_path) as h:
        echo(h.read().strip(), fg='white')
2016-04-02 15:46:03 +00:00
@piku.command("config:get")
@argument('app')
@argument('setting')
def cmd_config_get(app, setting):
    """e.g.: piku config:get <app> FOO"""
    app = exit_if_invalid(app)
    env_path = join(ENV_ROOT, app, 'ENV')
    if not exists(env_path):
        echo("Warning: no active configuration for '{}'".format(app))
        return
    env = parse_settings(env_path)
    # Print nothing if the key is absent (matches original behavior)
    if setting in env:
        echo("{}".format(env[setting]), fg='white')
2016-04-02 15:46:03 +00:00
@piku.command("config:set")
@argument('app')
@argument('settings', nargs=-1)
def cmd_config_set(app, settings):
    """e.g.: piku config:set <app> FOO=bar BAZ=quux"""
    app = exit_if_invalid(app)
    config_file = join(ENV_ROOT, app, 'ENV')
    env = parse_settings(config_file)
    for entry in settings:
        try:
            # Split once on '=' so values may themselves contain '='
            k, v = (part.strip() for part in entry.split("=", 1))
            env[k] = v
            echo("Setting {k:s}={v} for '{app:s}'".format(**locals()), fg='white')
        except Exception:
            # Abort without writing anything on the first malformed entry
            echo("Error: malformed setting '{}'".format(entry), fg='red')
            return
    write_config(config_file, env)
    do_deploy(app)
2016-05-03 21:45:58 +00:00
@piku.command("config:unset")
@argument('app')
@argument('settings', nargs=-1)
def cmd_config_unset(app, settings):
    """e.g.: piku config:unset <app> FOO"""
    app = exit_if_invalid(app)
    config_file = join(ENV_ROOT, app, 'ENV')
    env = parse_settings(config_file)
    for key in settings:
        # Unknown keys are silently ignored
        if key not in env:
            continue
        del env[key]
        echo("Unsetting {} for '{}'".format(key, app), fg='white')
    write_config(config_file, env)
    do_deploy(app)
2016-04-02 15:46:03 +00:00
@piku.command("config:live")
@argument('app')
def cmd_config_live(app):
    """e.g.: piku config:live <app>"""
    app = exit_if_invalid(app)
    live_config = join(ENV_ROOT, app, 'LIVE_ENV')
    if not exists(live_config):
        echo("Warning: app '{}' not deployed, no config found.".format(app), fg='yellow')
        return
    with open(live_config) as h:
        echo(h.read().strip(), fg='white')
2016-04-02 15:46:03 +00:00
2016-03-26 22:08:10 +00:00
@piku.command("deploy")
@argument('app')
def cmd_deploy(app):
    """e.g.: piku deploy <app>"""
    # exit_if_invalid terminates the process on a bad app name
    do_deploy(exit_if_invalid(app))
2016-03-26 22:47:39 +00:00
2016-03-29 20:18:38 +00:00
@piku.command("destroy")
@argument('app')
def cmd_destroy(app):
    """e.g.: piku destroy <app>"""

    app = exit_if_invalid(app)

    # leave DATA_ROOT, since apps may create hard to reproduce data,
    # and CACHE_ROOT, since `nginx` will set permissions to protect it
    for p in [join(x, app) for x in [APP_ROOT, GIT_ROOT, ENV_ROOT, LOG_ROOT]]:
        if exists(p):
            echo("--> Removing folder '{}'".format(p), fg='yellow')
            rmtree(p)

    # Remove uwsgi configs (both available and enabled), which also stops the app
    for p in [join(x, '{}*.ini'.format(app)) for x in [UWSGI_AVAILABLE, UWSGI_ENABLED]]:
        g = glob(p)
        if len(g) > 0:
            for f in g:
                echo("--> Removing file '{}'".format(f), fg='yellow')
                remove(f)

    # Remove the nginx config, socket and TLS material for this app
    nginx_files = [join(NGINX_ROOT, "{}.{}".format(app, x)) for x in ['conf', 'sock', 'key', 'crt']]
    for f in nginx_files:
        if exists(f):
            echo("--> Removing file '{}'".format(f), fg='yellow')
            remove(f)

    # ACME challenge dir is a symlink into the cert store: remove the real
    # folder first, then the link itself
    acme_link = join(ACME_WWW, app)
    acme_certs = realpath(acme_link)
    if exists(acme_certs):
        echo("--> Removing folder '{}'".format(acme_certs), fg='yellow')
        rmtree(acme_certs)
        echo("--> Removing file '{}'".format(acme_link), fg='yellow')
        unlink(acme_link)

    # These come last to make sure they're visible
    for p in [join(x, app) for x in [DATA_ROOT, CACHE_ROOT]]:
        if exists(p):
            echo("==> Preserving folder '{}'".format(p), fg='red')
2019-11-19 08:09:08 +00:00
2016-04-06 21:46:32 +00:00
@piku.command("logs")
@argument('app')
@argument('process', nargs=1, default='*')
def cmd_logs(app, process):
    """Tail running logs, e.g: piku logs <app> [<process>]"""
    app = exit_if_invalid(app)
    # Default '*' matches the logs of every process type for the app
    logfiles = glob(join(LOG_ROOT, app, process + '.*.log'))
    if not logfiles:
        echo("No logs found for app '{}'.".format(app), fg='yellow')
        return
    for line in multi_tail(app, logfiles):
        echo(line.strip(), fg='white')
2016-03-29 20:18:38 +00:00
2016-04-02 18:22:51 +00:00
@piku.command("ps")
@argument('app')
def cmd_ps(app):
    """Show process count, e.g: piku ps <app>"""
    app = exit_if_invalid(app)
    scaling_file = join(ENV_ROOT, app, 'SCALING')
    if not exists(scaling_file):
        echo("Error: no workers found for app '{}'.".format(app), fg='red')
        return
    with open(scaling_file) as h:
        echo(h.read().strip(), fg='white')
2016-04-02 16:38:53 +00:00
2016-04-02 18:22:51 +00:00
@piku.command("ps:scale")
@argument('app')
@argument('settings', nargs=-1)
def cmd_ps_scale(app, settings):
    """e.g.: piku ps:scale <app> <proc>=<count>"""
    app = exit_if_invalid(app)
    scaling_file = join(ENV_ROOT, app, 'SCALING')
    current = {proc: int(count) for proc, count in parse_procfile(scaling_file).items()}
    deltas = {}
    for entry in settings:
        try:
            proc, value = (x.strip() for x in entry.split("=", 1))
            target = int(value)  # check for integer value
            if target < 0:
                echo("Error: cannot scale type '{}' below 0".format(proc), fg='red')
                return
            if proc not in current:
                echo("Error: worker type '{}' not present in '{}'".format(proc, app), fg='red')
                return
            # Record how many workers to add (positive) or remove (negative)
            deltas[proc] = target - current[proc]
        except Exception:
            echo("Error: malformed setting '{}'".format(entry), fg='red')
            return
    do_deploy(app, deltas)
2016-04-02 16:38:53 +00:00
2016-09-09 23:11:39 +00:00
@piku.command("run")
@argument('app')
@argument('cmd', nargs=-1)
def cmd_run(app, cmd):
    """e.g.: piku run <app> ls -- -al"""
    app = exit_if_invalid(app)
    # Inject the app's live environment into our own before spawning
    config_file = join(ENV_ROOT, app, 'LIVE_ENV')
    environ.update(parse_settings(config_file))
    # Switch our stdout/stderr to non-blocking so the child's inherited
    # streams pass output through promptly
    for f in [stdout, stderr]:
        fl = fcntl(f, F_GETFL)
        fcntl(f, F_SETFL, fl | O_NONBLOCK)
    # shell=True: the user-supplied args form one shell command line, run
    # from the app's directory with our stdio attached
    p = Popen(' '.join(cmd), stdin=stdin, stdout=stdout, stderr=stderr, env=environ, cwd=join(APP_ROOT, app), shell=True)
    p.communicate()
2016-09-09 23:11:39 +00:00
2019-11-20 08:28:02 +00:00
2016-04-06 21:46:32 +00:00
@piku.command("restart")
@argument('app')
def cmd_restart(app):
    """Restart an app: piku restart <app>"""
    # exit_if_invalid terminates the process on a bad app name
    do_restart(exit_if_invalid(app))
2016-04-06 21:46:32 +00:00
2016-04-03 21:36:04 +00:00
@piku.command("setup")
def cmd_setup():
    """Initialize environment"""
    echo("Running in Python {}".format(".".join(map(str, version_info))))

    # Create required paths
    for folder in [APP_ROOT, CACHE_ROOT, DATA_ROOT, GIT_ROOT, ENV_ROOT, UWSGI_ROOT, UWSGI_AVAILABLE, UWSGI_ENABLED, LOG_ROOT, NGINX_ROOT]:
        if not exists(folder):
            echo("Creating '{}'.".format(folder), fg='green')
            makedirs(folder)

    # Set up the uWSGI emperor config, running as the current user/group
    emperor_settings = [
        ('chdir', UWSGI_ROOT),
        ('emperor', UWSGI_ENABLED),
        ('log-maxsize', UWSGI_LOG_MAXSIZE),
        ('logto', join(UWSGI_ROOT, 'uwsgi.log')),
        ('log-backupname', join(UWSGI_ROOT, 'uwsgi.old.log')),
        ('socket', join(UWSGI_ROOT, 'uwsgi.sock')),
        ('uid', getpwuid(getuid()).pw_name),
        ('gid', getgrgid(getgid()).gr_name),
        ('enable-threads', 'true'),
        ('threads', '{}'.format(cpu_count() * 2)),
    ]
    with open(join(UWSGI_ROOT, 'uwsgi.ini'), 'w') as h:
        h.write('[uwsgi]\n')
        for key, value in emperor_settings:
            h.write("{} = {}\n".format(key, value))

    # mark this script as executable (in case we were invoked via interpreter)
    if not (stat(PIKU_SCRIPT).st_mode & S_IXUSR):
        echo("Setting '{}' as executable.".format(PIKU_SCRIPT), fg='yellow')
        chmod(PIKU_SCRIPT, stat(PIKU_SCRIPT).st_mode | S_IXUSR)
2016-04-03 21:36:04 +00:00
2016-04-03 16:14:15 +00:00
@piku.command("setup:ssh")
@argument('public_key_file')
def cmd_setup_ssh(public_key_file):
    """Set up a new SSH key (use - for stdin)"""
    def add_helper(key_file):
        if exists(key_file):
            try:
                # Field 2 of `ssh-keygen -l` output is the key fingerprint
                fingerprint = str(check_output('ssh-keygen -lf ' + key_file, shell=True)).split(' ', 4)[1]
                key = open(key_file, 'r').read().strip()
                echo("Adding key '{}'.".format(fingerprint), fg='white')
                setup_authorized_keys(fingerprint, PIKU_SCRIPT, key)
            except Exception:
                echo("Error: invalid public key file '{}': {}".format(key_file, format_exc()), fg='red')
        elif public_key_file == '-':
            # Key came in on stdin: persist it to a temp file and recurse once
            # so the exists() branch above handles it
            buffer = "".join(stdin.readlines())
            with NamedTemporaryFile(mode="w") as f:
                f.write(buffer)
                f.flush()
                add_helper(f.name)
        else:
            echo("Error: public key file '{}' not found.".format(key_file), fg='red')

    add_helper(public_key_file)
2016-04-03 16:14:15 +00:00
2016-04-06 21:46:32 +00:00
@piku.command("stop")
@argument('app')
def cmd_stop(app):
    """Stop an app, e.g: piku stop <app>"""
    # exit_if_invalid terminates the process on a bad app name
    do_stop(exit_if_invalid(app))
2019-11-19 08:09:08 +00:00
2016-03-29 20:18:38 +00:00
# --- Internal commands ---
2016-03-26 22:47:39 +00:00
2019-11-28 11:54:33 +00:00
@piku.command("git-hook")
@argument('app')
def cmd_git_hook(app):
    """INTERNAL: Post-receive git hook"""

    app = sanitize_app_name(app)
    repo_path = join(GIT_ROOT, app)
    app_path = join(APP_ROOT, app)
    data_path = join(DATA_ROOT, app)

    # git's post-receive hook feeds one "<oldrev> <newrev> <refname>" line
    # per updated ref on stdin
    for line in stdin:
        # pylint: disable=unused-variable
        oldrev, newrev, refname = line.strip().split(" ")
        # Handle pushes
        if not exists(app_path):
            # First push: create the working copy by cloning the bare repo
            echo("-----> Creating app '{}'".format(app), fg='green')
            makedirs(app_path)
            # The data directory may already exist, since this may be a full redeployment (we never delete data since it may be expensive to recreate)
            if not exists(data_path):
                makedirs(data_path)
            call("git clone --quiet {} {}".format(repo_path, app), cwd=APP_ROOT, shell=True)
        do_deploy(app, newrev=newrev)
2016-03-29 20:18:38 +00:00
2019-11-28 11:54:33 +00:00
@piku.command("git-receive-pack")
@argument('app')
def cmd_git_receive_pack(app):
    """INTERNAL: Handle git pushes for an app"""

    app = sanitize_app_name(app)
    hook_path = join(GIT_ROOT, app, 'hooks', 'post-receive')
    # Build the template namespace from a *copy* of globals() so that merging
    # in our locals does not pollute the module namespace (the original did
    # `env = globals(); env.update(locals())`, mutating globals in place).
    env = dict(globals())
    env.update(locals())

    if not exists(hook_path):
        makedirs(dirname(hook_path))
        # Initialize the repository with a hook to this script
        call("git init --quiet --bare " + app, cwd=GIT_ROOT, shell=True)
        with open(hook_path, 'w') as h:
            h.write("""#!/usr/bin/env bash
set -e; set -o pipefail;
cat | PIKU_ROOT="{PIKU_ROOT:s}" {PIKU_SCRIPT:s} git-hook {app:s}""".format(**env))
        # Make the hook executable by our user
        chmod(hook_path, stat(hook_path).st_mode | S_IXUSR)
    # Handle the actual receive. We'll be called with 'git-hook' after it happens
    call('git-shell -c "{}"'.format(argv[1] + " '{}'".format(app)), cwd=GIT_ROOT, shell=True)
2017-05-15 20:37:14 +00:00
2019-11-28 11:54:33 +00:00
@piku.command("git-upload-pack")
@argument('app')
def cmd_git_upload_pack(app):
    """INTERNAL: Handle git upload pack for an app"""
    app = sanitize_app_name(app)
    # Dropped the dead `env = globals(); env.update(locals())` pair copied
    # from git-receive-pack: `env` was never used here, and update() mutated
    # the module's global namespace as a side effect.
    # Hand the request to git-shell; argv[1] carries the original
    # 'git-upload-pack' command line from the forced SSH command.
    call('git-shell -c "{}"'.format(argv[1] + " '{}'".format(app)), cwd=GIT_ROOT, shell=True)
2024-04-11 17:53:29 +00:00
@piku.command("scp", context_settings=dict(ignore_unknown_options=True, allow_extra_args=True))
@pass_context
def cmd_scp(ctx):
    """Simple wrapper to allow scp to work."""
    # NOTE(review): ctx.args is interpolated into a shell=True command line;
    # callers arrive via the SSH forced command, but this still deserves a
    # hardening pass (list-form call without shell=True) — confirm constraints.
    call(" ".join(["scp"] + ctx.args), cwd=GIT_ROOT, shell=True)
2019-11-17 02:38:33 +00:00
def _get_plugin_commands ( path ) :
sys_path . append ( abspath ( path ) )
cli_commands = [ ]
2019-11-26 04:20:25 +00:00
if isdir ( path ) :
for item in listdir ( path ) :
module_path = join ( path , item )
2019-11-26 04:23:35 +00:00
if isdir ( module_path ) :
try :
module = import_module ( item )
2019-11-26 04:39:01 +00:00
except Exception :
2019-11-26 04:23:35 +00:00
module = None
if hasattr ( module , ' cli_commands ' ) :
cli_commands . append ( module . cli_commands ( ) )
2019-11-17 02:38:33 +00:00
return cli_commands
2019-08-05 08:08:59 +00:00
2019-11-26 04:35:11 +00:00
2019-11-18 21:33:01 +00:00
@piku.command("help")
@pass_context
def cmd_help(ctx):
    """display help for piku"""
    # ctx.parent is the top-level group; reuse its generated help text
    echo(ctx.parent.get_help())
2019-11-17 23:43:32 +00:00
@piku.command("update")
def cmd_update():
    """Update the piku cli"""
    echo("Updating piku...")
    with NamedTemporaryFile(mode="w") as f:
        tempfile = f.name
        # -w %{http_code} makes curl print only the HTTP status on stdout;
        # the body is written to the temp file via -o
        cmd = """curl -sL -w %{{http_code}} {} -o {}""".format(PIKU_RAW_SOURCE_URL, tempfile)
        response = check_output(cmd.split(' '), stderr=STDOUT)
        http_code = response.decode('utf8').strip()
        if http_code == "200":
            # Overwrite this very script in place with the downloaded copy
            copyfile(tempfile, PIKU_SCRIPT)
            echo("Update successful.")
        else:
            echo("Error updating piku - please check if {} is accessible from this machine.".format(PIKU_RAW_SOURCE_URL))
    echo("Done.")
2016-03-26 12:52:54 +00:00
if __name__ == '__main__':
    # Merge plugin-provided command groups with the built-in `piku` group so
    # plugins found under PIKU_PLUGIN_ROOT can extend the CLI.
    cli_commands = _get_plugin_commands(path=PIKU_PLUGIN_ROOT)
    cli_commands.append(piku)
    cli = CommandCollection(sources=cli_commands)
    cli()