kopia lustrzana https://gitlab.com/marnanel/chapeau
Porównaj commity
3 Commity
03708faed6
...
a4d1e8ed03
Autor | SHA1 | Data |
---|---|---|
Marnanel Thurman | a4d1e8ed03 | |
Marnanel Thurman | c1e66efff2 | |
Marnanel Thurman | c098602b68 |
|
@ -1,44 +0,0 @@
|
|||
from socketserver import TCPServer
|
||||
from fastcgi.core import FcgiHandler
|
||||
import accept
|
||||
|
||||
# environ:
|
||||
# query {'QUERY_STRING': '', 'REQUEST_METHOD': 'GET', 'CONTENT_TYPE': '', 'CONTENT_LENGTH': '', 'SCRIPT_NAME': '/', 'REQUEST_URI': '/', 'DOCUMENT_URI': '/', 'DOCUMENT_ROOT': '/var/www/html', 'SERVER_PROTOCOL': 'HTTP/1.1', 'REQUEST_SCHEME': 'https', 'HTTPS': 'on', 'GATEWAY_INTERFACE': 'CGI/1.1', 'SERVER_SOFTWARE': 'nginx/1.22.1', 'REMOTE_ADDR': '217.155.192.32', 'REMOTE_PORT': '65482', 'REMOTE_USER': '', 'SERVER_ADDR': '192.168.1.83', 'SERVER_PORT': '443', 'SERVER_NAME': 'sandy-heath.thurman.org.uk', 'REDIRECT_STATUS': '200', 'HTTP_HOST': 'sandy-heath.thurman.org.uk', 'HTTP_USER_AGENT': 'Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/115.0', 'HTTP_ACCEPT': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8', 'HTTP_ACCEPT_LANGUAGE': 'en-GB,en;q=0.5', 'HTTP_ACCEPT_ENCODING': 'gzip, deflate, br', 'HTTP_CONNECTION': 'keep-alive', 'HTTP_COOKIE': 'csrftoken=Ga1NZdJTo0F5zaSXCmmYq69xSePMfzIk', 'HTTP_UPGRADE_INSECURE_REQUESTS': '1', 'HTTP_SEC_FETCH_DEST': 'document', 'HTTP_SEC_FETCH_MODE': 'navigate', 'HTTP_SEC_FETCH_SITE': 'cross-site', 'HTTP_SEC_GPC': '1'}
|
||||
|
||||
CONTENTTYPE_ACTIVITY = 'application/activity+json'
|
||||
CONTENTTYPE_HTML = 'text/html'
|
||||
|
||||
class TestHandler(FcgiHandler):
    """Throwaway FastCGI handler used to experiment with nginx dispatch."""

    def handle(self):
        # XXX Here we check HTTP_ACCEPT
        # XXX and DOCUMENT_URI and possibly QUERY_STRING
        # XXX and despatch as appropriate
        print('query', self.environ)

        mime = accept.parse(self.environ['HTTP_ACCEPT'])
        print(mime)
        print(self.environ['DOCUMENT_URI'])

        # Header section, then a fixed HTML body; both become bytes
        # before writing to the FastCGI stdout stream.
        header_text = f'Content-Type: {CONTENTTYPE_ACTIVITY}\r\n'
        header_text += '\r\n'
        body_text = '<html>foobar</html>'

        payload = (
            header_text.encode('UTF-8') +
            b'\r\n\r\n' +
            body_text.encode('UTF-8')
        )
        self['stdout'].write(payload)
|
||||
|
||||
def run():
    """Serve exactly one FastCGI request on localhost:17177, then exit."""
    address = ('localhost', 17177)
    with TCPServer(address, TestHandler) as srv:
        srv.handle_request()


if __name__ == '__main__':
    run()
|
|
@ -5,6 +5,8 @@ import logging
|
|||
import json
|
||||
|
||||
import kepi.validate
|
||||
import kepi.fastcgi
|
||||
from kepi.config import config
|
||||
|
||||
logger = logging.getLogger('kepi')
|
||||
logging.basicConfig(
|
||||
|
@ -12,6 +14,11 @@ logging.basicConfig(
|
|||
stream = sys.stdout,
|
||||
)
|
||||
|
||||
COMMANDS = [
|
||||
'validate',
|
||||
'fastcgi',
|
||||
]
|
||||
|
||||
def daemonise(stdin='/dev/null', stdout='/dev/null', stderr='/dev/null'):
|
||||
|
||||
pid = os.fork()
|
||||
|
@ -55,33 +62,23 @@ def load_message(name):
|
|||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(
|
||||
description='send or receive ActivityPub messages')
|
||||
prog='kepi',
|
||||
description='send or receive ActivityPub messages',
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
'--spool', '-S', default='/var/spool/kepi',
|
||||
help='directory to store the messages')
|
||||
|
||||
subparsers = parser.add_subparsers(
|
||||
dest = 'command',
|
||||
required = True,
|
||||
)
|
||||
config.parse_args(parser)
|
||||
|
||||
validate_parser = subparsers.add_parser('validate')
|
||||
|
||||
validate_parser.add_argument(
|
||||
'input',
|
||||
help=(
|
||||
'the file to read ("-" for stdin)'
|
||||
),
|
||||
)
|
||||
for command in COMMANDS:
|
||||
getattr(kepi, command).add_argparser(subparsers)
|
||||
|
||||
args = parser.parse_args()
|
||||
config.handle_argparse(args)
|
||||
|
||||
if args.command=='validate':
|
||||
message = load_message(args.input)
|
||||
kepi.validate.validate(message)
|
||||
else:
|
||||
raise NotImplementedError()
|
||||
getattr(kepi, args.command).run()
|
||||
|
||||
if __name__=='__main__':
|
||||
main()
|
||||
|
|
|
@ -0,0 +1,50 @@
|
|||
import argparse
|
||||
|
||||
class Config:
    """Process-wide configuration singleton.

    Settings are gathered from argparse (and eventually a config file)
    into the class-level ``settings`` dict and exposed as attributes
    via ``__getattr__``.  The single shared instance is built with
    ``Config.__new__`` (see the module-level ``config`` object);
    calling the class directly is deliberately forbidden.
    """

    # Shared state: every lookup goes through these class attributes.
    settings = {}
    subcommands = []
    argparser = None

    def __init__(self):
        # The singleton is created with __new__; direct construction
        # is a programming error.
        raise NotImplementedError()

    def users(self):
        # Placeholder: user enumeration is not implemented yet.
        raise ValueError()

    def __getattr__(self, field):
        """Expose each entry of ``settings`` as an attribute.

        Raises AttributeError for unknown names, as the attribute
        protocol requires.  (The original raised KeyError, which
        breaks hasattr() and getattr(..., default).)
        """
        try:
            return self.settings[field]
        except KeyError:
            raise AttributeError(field) from None

    def parse_args(self, argparser):
        """Register all subcommands, parse argv, and store the results.

        May only be called once per process: ``subcommands`` is set
        to None afterwards so a second call trips the guard below.
        """
        assert self.subcommands is not None

        self.argparser = argparser

        self.subparsers = argparser.add_subparsers(
            dest = 'command',
            required = True,
            )

        # Each registered hook adds its own parser to the subparsers.
        for sub in self.subcommands:
            sub(self.subparsers)

        self.subcommands = None

        args = self.argparser.parse_args()

        # Copy every public field of the parsed namespace into the
        # shared settings dict.
        for field in dir(args):
            if field.startswith('_'):
                continue
            self.settings[field] = getattr(args, field)

        # TODO here we will check the config file, and merge them in.
|
||||
|
||||
# The one shared Config instance.  Built with __new__ to bypass
# __init__, which deliberately refuses direct construction.
config = Config.__new__(Config)
|
||||
|
||||
def subcommand(fn):
    """Decorator: register *fn* as a subcommand-argparser hook.

    The hook is later called with the argparse subparsers object
    during Config.parse_args().  Returns *fn* unchanged so the
    decorated name stays callable.  (The original returned None,
    which rebound every decorated function's name to None.)
    """
    config.subcommands.append(fn)
    return fn
|
|
@ -0,0 +1,123 @@
|
|||
from socketserver import TCPServer
|
||||
from fastcgi.core import FcgiHandler
|
||||
import re
|
||||
import logging
|
||||
from kepi.config import config, subcommand
|
||||
|
||||
# Module-level logger for the fastcgi subcommand.
logger = logging.getLogger('kepi.fastcgi')

# environ:
# Sketch of the CGI environment nginx hands us, for reference:
#   QUERY_STRING, REQUEST_METHOD, CONTENT_TYPE, CONTENT_LENGTH,
#   SCRIPT_NAME, REQUEST_URI, DOCUMENT_URI, DOCUMENT_ROOT,
#   SERVER_PROTOCOL, REQUEST_SCHEME, HTTPS, GATEWAY_INTERFACE,
#   SERVER_SOFTWARE, REMOTE_ADDR / REMOTE_PORT / REMOTE_USER,
#   SERVER_ADDR / SERVER_PORT / SERVER_NAME, REDIRECT_STATUS,
#   plus the HTTP_* request headers (HTTP_HOST, HTTP_USER_AGENT,
#   HTTP_ACCEPT, HTTP_ACCEPT_LANGUAGE, HTTP_COOKIE, ...).

# MIME types we can emit: ActivityPub JSON, and plain HTML.
CONTENTTYPE_ACTIVITY = 'application/activity+json'
CONTENTTYPE_HTML = 'text/html'

# Matches "users/<username>" (optional trailing slash);
# group 1 captures the username.
USER_PAGE_RE = r'users/([a-z0-9-]+)/?'

# Complete CGI response (headers, blank line, body) for an unknown
# URI.  Newlines are turned into CRLF by _encode() before writing.
ERROR_404 = """Content-Type: text/html
Status: 404 Not found

That resource does not exist here.
"""

# Listen address used when --host / --port are not given.
DEFAULT_HOST = 'localhost'
DEFAULT_PORT = 17177
|
||||
|
||||
@subcommand
def fastcgi_command(subparsers):
    """Register the command-line interface of the 'fastcgi' subcommand."""
    parser = subparsers.add_parser('fastcgi')

    parser.add_argument(
        '--port', '-P',
        type=int,
        default=DEFAULT_PORT,
        help=f'the port to listen on (default {DEFAULT_PORT})',
    )

    parser.add_argument(
        '--host', '-H',
        default=DEFAULT_HOST,
        help=f'the host to listen on (default {DEFAULT_HOST})',
    )

    parser.add_argument(
        '--verbose', '-v',
        action='store_true',
        help='print more information',
    )
|
||||
|
||||
def _encode(s):
|
||||
return s.replace('\n','\r\n').encode('UTF-8')
|
||||
|
||||
class KepiHandler(FcgiHandler):
    """FastCGI request handler: dispatches DOCUMENT_URI to page handlers."""

    def _handle_user_page(self, match):
        """Render a user page.

        *match* is the re.Match for USER_PAGE_RE; group 1 is the
        username.  Returns the complete CGI response (headers, blank
        line, body) as a str; the caller CRLF-encodes and writes it.
        """
        # Bug fix: was match.groups(1), which returns a tuple of all
        # groups, not the captured username.
        username = match.group(1)

        # XXX placeholder content until real user rendering exists;
        # username is captured but not used yet.
        result = f"""Content-Type: {CONTENTTYPE_ACTIVITY}
Status: 200

Hello world.
"""
        # Bug fix: the response was built but never returned, so every
        # user page fell through to the 404 handler below.
        return result

    def handle(self):
        # XXX Here we check HTTP_ACCEPT
        # XXX and DOCUMENT_URI and possibly QUERY_STRING
        # XXX and despatch as appropriate

        logger.debug('query: %s', self.environ)

        uri = self.environ['DOCUMENT_URI']

        # Try each (pattern, handler) pair in order; the first handler
        # that returns a response wins.
        for regex, handler in [
                (USER_PAGE_RE, self._handle_user_page),
                ]:

            match = re.match(regex, uri)
            # Debug prints removed: they went to process stdout, not
            # the FastCGI response stream.
            logger.debug('dispatch %s against %r -> %s', uri, regex, match)
            if match is None:
                continue

            result = handler(
                    match = match,
                    )

            if result is not None:
                self['stdout'].write(_encode(result))
                return

        # Nothing matched: emit the canned 404 response.
        self['stdout'].write(_encode(ERROR_404))
|
||||
|
||||
def run():
    """Serve a single FastCGI request at the configured host and port."""
    level = logging.DEBUG if config.verbose else logging.INFO
    logger.setLevel(level)

    address = (config.host, config.port)
    with TCPServer(address, KepiHandler) as srv:
        logger.info("Listening at %s:%s", config.host, config.port)
        srv.handle_request()


if __name__ == '__main__':
    run()
|
|
@ -2,13 +2,24 @@ import json
|
|||
import httpsig
|
||||
import logging
|
||||
import kepi
|
||||
|
||||
from kepi.config import config, subcommand
|
||||
import kepi.fetch
|
||||
|
||||
# Somewhat based on bowler. Will merge things when the dust settles.
|
||||
|
||||
logger = logging.getLogger('kepi')
|
||||
|
||||
@subcommand
|
||||
def validate_command(subparsers):
|
||||
parser = subparsers.add_parser('validate')
|
||||
|
||||
parser.add_argument(
|
||||
'input',
|
||||
help=(
|
||||
'the file to read ("-" for stdin)'
|
||||
),
|
||||
)
|
||||
|
||||
def validate(
|
||||
message,
|
||||
):
|
||||
|
|
Ładowanie…
Reference in New Issue