microblog.pub/app/httpsig.py
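
"""HTTP signature support for ActivityPub requests.

Incoming requests are checked via the `httpsig_checker` / `enforce_httpsig`
FastAPI dependencies; outgoing requests are signed with the `HTTPXSigAuth`
httpx auth class using the instance's RSA key.
"""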

import base64
import hashlib
import json
import typing
from dataclasses import dataclass
from datetime import datetime
from datetime import timedelta
from datetime import timezone
from typing import Any
from typing import Dict
from typing import MutableMapping
from typing import Optional
from urllib.parse import urlparse

import fastapi
import httpx
from cachetools import LFUCache
from Crypto.Hash import SHA256
from Crypto.Signature import PKCS1_v1_5
from dateutil.parser import parse
from loguru import logger
from sqlalchemy import select

from app import activitypub as ap
from app import config
from app.config import KEY_PATH
from app.database import AsyncSession
from app.database import get_db_session
from app.key import Key
from app.utils.datetime import now
from app.utils.url import is_hostname_blocked

_KEY_CACHE: MutableMapping[str, Key] = LFUCache(256)


def _build_signed_string(
    signed_headers: str,
    method: str,
    path: str,
    headers: Any,
    body_digest: str | None,
    sig_data: dict[str, Any],
) -> tuple[str, datetime | None]:
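    """Build the string that is signed/verified from the list of signed headers.

    Also extract the signature date (from the `(created)` pseudo-header or the
    `date` header) so the caller can reject expired signatures.
    """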
    signature_date: datetime | None = None

    out = []
    for signed_header in signed_headers.split(" "):
        if signed_header == "(created)":
            signature_date = datetime.fromtimestamp(int(sig_data["created"])).replace(
                tzinfo=timezone.utc
            )
        elif signed_header == "date":
            signature_date = parse(headers["date"])

        if signed_header == "(request-target)":
            out.append("(request-target): " + method.lower() + " " + path)
        elif signed_header == "digest" and body_digest:
            out.append("digest: " + body_digest)
        elif signed_header in ["(created)", "(expires)"]:
            out.append(
                signed_header
                + ": "
                + sig_data[signed_header[1 : len(signed_header) - 1]]
            )
        else:
            out.append(signed_header + ": " + headers[signed_header])

    return "\n".join(out), signature_date


def _parse_sig_header(val: Optional[str]) -> Optional[Dict[str, str]]:
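    """Parse the `Signature` header into a dict, stripping the surrounding quotes.

    An illustrative (not real) header value like
        keyId="https://example.com/actor#main-key",algorithm="rsa-sha256",headers="(request-target) host date",signature="..."
    becomes {"keyId": ..., "algorithm": "rsa-sha256", "headers": ..., "signature": ...}.
    """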
if not val:
return None
out = {}
for data in val.split(","):
k, v = data.split("=", 1)
out[k] = v[1 : len(v) - 1] # noqa: black conflict
return out


def _verify_h(signed_string: str, signature: bytes, pubkey: Any) -> bool:
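    """Verify an RSA-SHA256 `signature` over `signed_string` with the given public key."""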
signer = PKCS1_v1_5.new(pubkey)
digest = SHA256.new()
digest.update(signed_string.encode("utf-8"))
return signer.verify(digest, signature)


def _body_digest(body: bytes) -> str:
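    """Compute the `Digest` header value for a request body, e.g. "SHA-256=<base64 digest>"."""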
h = hashlib.new("sha256")
h.update(body) # type: ignore
return "SHA-256=" + base64.b64encode(h.digest()).decode("utf-8")


async def _get_public_key(
    db_session: AsyncSession,
    key_id: str,
    should_skip_cache: bool = False,
) -> Key:
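    """Resolve the public key for `key_id`.

    Checks the in-memory LFU cache and the local actor table first, then falls
    back to fetching the key (or its owning actor) over HTTP.
    """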
    if not should_skip_cache and (cached_key := _KEY_CACHE.get(key_id)):
        logger.info(f"Key {key_id} found in cache")
        return cached_key

    # Check if the key belongs to an actor already in DB
    from app import models

    existing_actor = (
        await db_session.scalars(
            select(models.Actor).where(models.Actor.ap_id == key_id.split("#")[0])
        )
    ).one_or_none()
    if not should_skip_cache:
        if existing_actor and existing_actor.public_key_id == key_id:
            k = Key(existing_actor.ap_id, key_id)
            k.load_pub(existing_actor.public_key_as_pem)
            logger.info(f"Found {key_id} on an existing actor")
            _KEY_CACHE[key_id] = k
            return k

    # Fetch it
    from app import activitypub as ap
    from app.actor import RemoteActor
    from app.actor import update_actor_if_needed

    # Without signing the request as if it's the first contact, the 2 servers
    # might race to fetch each other's key
    try:
        actor = await ap.fetch(key_id, disable_httpsig=True)
    except ap.ObjectUnavailableError:
        actor = await ap.fetch(key_id, disable_httpsig=False)
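
    # The fetched document is either the actor (with an embedded `publicKey`
    # object) or a standalone Key document. An illustrative (not real) shape:
    #   {"id": "https://example.com/actor#main-key",
    #    "owner": "https://example.com/actor",
    #    "publicKeyPem": "-----BEGIN PUBLIC KEY-----..."}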

    if actor["type"] == "Key":
        # The Key is not embedded in the Person
        k = Key(actor["owner"], actor["id"])
        k.load_pub(actor["publicKeyPem"])
    else:
        k = Key(actor["id"], actor["publicKey"]["id"])
        k.load_pub(actor["publicKey"]["publicKeyPem"])

    # Ensure the right key was fetched
    # TODO: some servers have a key ID starting with `http://` but fetching it
    # returns `https://`
    if key_id not in [k.key_id(), k.owner]:
        raise ValueError(
            f"failed to fetch requested key {key_id}: got {actor['publicKey']}"
        )

    if should_skip_cache and actor["type"] != "Key" and existing_actor:
        # We had to skip the cache, which means the actor key probably changed
        # and we want to update our cached version
        await update_actor_if_needed(db_session, existing_actor, RemoteActor(actor))
        await db_session.commit()

    _KEY_CACHE[key_id] = k
    return k


@dataclass(frozen=True)
class HTTPSigInfo:
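    """Outcome of the HTTP signature check performed by `httpsig_checker`."""
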
    has_valid_signature: bool
    signed_by_ap_actor_id: str | None = None

    is_ap_actor_gone: bool = False
    is_unsupported_algorithm: bool = False
    is_expired: bool = False
    is_from_blocked_server: bool = False

    server: str | None = None


async def httpsig_checker(
    request: fastapi.Request,
    db_session: AsyncSession = fastapi.Depends(get_db_session),
) -> HTTPSigInfo:
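    """FastAPI dependency that checks the HTTP signature of an incoming request.

    Failures are reported through the returned `HTTPSigInfo` (so routes can
    decide what to do via `enforce_httpsig`); the only exception is the early
    drop of Delete spam below, which short-circuits with a 202.
    """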
    body = await request.body()

    hsig = _parse_sig_header(request.headers.get("Signature"))
    if not hsig:
        logger.info("No HTTP signature found")
        return HTTPSigInfo(has_valid_signature=False)

    try:
        key_id = hsig["keyId"]
    except KeyError:
        logger.info("Missing keyId")
        return HTTPSigInfo(
            has_valid_signature=False,
        )

    server = urlparse(key_id).hostname
    if is_hostname_blocked(server):
        return HTTPSigInfo(
            has_valid_signature=False,
            server=server,
            is_from_blocked_server=True,
        )

    if (alg := hsig.get("algorithm")) not in ["rsa-sha256", "hs2019"]:
        logger.info(f"Unsupported HTTP sig algorithm: {alg}")
        return HTTPSigInfo(
            has_valid_signature=False,
            is_unsupported_algorithm=True,
            server=server,
        )
    # Try to drop Delete activity spam early on; this prevents making an extra
    # HTTP request to fetch an unavailable actor just to verify the HTTP sig
    try:
        if request.method == "POST" and request.url.path.endswith("/inbox"):
            from app import models  # TODO: solve this circular import

            activity = json.loads(body)
            actor_id = ap.get_id(activity["actor"])
            if (
                ap.as_list(activity["type"])[0] == "Delete"
                and actor_id == ap.get_id(activity["object"])
                and not (
                    await db_session.scalars(
                        select(models.Actor).where(
                            models.Actor.ap_id == actor_id,
                        )
                    )
                ).one_or_none()
            ):
                logger.info(f"Dropping Delete activity early for {body=}")
                raise fastapi.HTTPException(status_code=202)
    except fastapi.HTTPException as http_exc:
        raise http_exc
    except Exception:
        logger.exception("Failed to check for Delete spam")

    # logger.debug(f"hsig={hsig}")
    signed_string, signature_date = _build_signed_string(
        hsig["headers"],
        request.method,
        request.url.path,
        request.headers,
        _body_digest(body) if body else None,
        hsig,
    )

    # Sanity checks on the signature date
    if signature_date is None or now() - signature_date > timedelta(hours=12):
        logger.info(f"Signature expired: {signature_date=}")
        return HTTPSigInfo(
            has_valid_signature=False,
            is_expired=True,
            server=server,
        )

    try:
        k = await _get_public_key(db_session, hsig["keyId"])
    except (ap.ObjectIsGoneError, ap.ObjectNotFoundError):
        logger.info("Actor is gone or not found")
        return HTTPSigInfo(has_valid_signature=False, is_ap_actor_gone=True)
    except Exception:
        logger.exception(f'Failed to fetch HTTP sig key {hsig["keyId"]}')
        return HTTPSigInfo(has_valid_signature=False)

    has_valid_signature = _verify_h(
        signed_string, base64.b64decode(hsig["signature"]), k.pubkey
    )

    # If the signature is not valid, we may have to update the cached actor
    if not has_valid_signature:
        logger.info("Invalid signature, trying to refresh actor")
        try:
            k = await _get_public_key(db_session, hsig["keyId"], should_skip_cache=True)
            has_valid_signature = _verify_h(
                signed_string, base64.b64decode(hsig["signature"]), k.pubkey
            )
        except Exception:
            logger.exception("Failed to refresh actor")

    httpsig_info = HTTPSigInfo(
        has_valid_signature=has_valid_signature,
        signed_by_ap_actor_id=k.owner,
        server=server,
    )
logger.info(f"Valid HTTP signature for {httpsig_info.signed_by_ap_actor_id}")
return httpsig_info


async def enforce_httpsig(
    request: fastapi.Request,
    httpsig_info: HTTPSigInfo = fastapi.Depends(httpsig_checker),
) -> HTTPSigInfo:
    """FastAPI dependency that rejects requests without a valid HTTP signature."""
    if httpsig_info.is_from_blocked_server:
        logger.warning(f"{httpsig_info.server} is blocked")
        raise fastapi.HTTPException(status_code=403, detail="Blocked")

    if not httpsig_info.has_valid_signature:
        logger.warning(f"Invalid HTTP sig {httpsig_info=}")
        body = await request.body()
        logger.info(f"{body=}")

        # Special case for Mastodon instances that keep resending Delete
        # activities for actors we don't know about if we raise a 401
        if httpsig_info.is_ap_actor_gone:
            logger.info("Let's make Mastodon happy, returning a 202")
            raise fastapi.HTTPException(status_code=202)

        detail = "Invalid HTTP sig"
        if httpsig_info.is_unsupported_algorithm:
            detail = "Unsupported signature algorithm, must be rsa-sha256 or hs2019"
        elif httpsig_info.is_expired:
            detail = "Signature expired"
        raise fastapi.HTTPException(status_code=401, detail=detail)

    return httpsig_info
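
# Hypothetical usage sketch (not part of this module): an inbox route that
# requires a valid signature by depending on `enforce_httpsig`.
#
#     @app.post("/inbox")
#     async def inbox(
#         request: fastapi.Request,
#         httpsig_info: HTTPSigInfo = fastapi.Depends(enforce_httpsig),
#     ) -> fastapi.Response:
#         ...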


class HTTPXSigAuth(httpx.Auth):
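    """httpx auth helper that signs outgoing requests with the instance key."""
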
def __init__(self, key: Key) -> None:
self.key = key

    def auth_flow(
self, r: httpx.Request
) -> typing.Generator[httpx.Request, httpx.Response, None]:
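        """Add `Date`, `Digest` (when there is a body) and `Signature` headers to `r`."""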
logger.info(f"keyid={self.key.key_id()}")
bodydigest = None
if r.content:
bh = hashlib.new("sha256")
bh.update(r.content)
bodydigest = "SHA-256=" + base64.b64encode(bh.digest()).decode("utf-8")
date = datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S GMT")
r.headers["Date"] = date
if bodydigest:
r.headers["Digest"] = bodydigest
sigheaders = "(request-target) user-agent host date digest content-type"
else:
sigheaders = "(request-target) user-agent host date accept"
2022-07-21 19:49:42 +00:00
to_be_signed, _ = _build_signed_string(
2022-07-20 18:29:49 +00:00
sigheaders, r.method, r.url.path, r.headers, bodydigest, {}
2022-06-22 18:11:22 +00:00
)
if not self.key.privkey:
raise ValueError("Should never happen")
signer = PKCS1_v1_5.new(self.key.privkey)
digest = SHA256.new()
digest.update(to_be_signed.encode("utf-8"))
sig = base64.b64encode(signer.sign(digest)).decode()
key_id = self.key.key_id()
sig_value = f'keyId="{key_id}",algorithm="rsa-sha256",headers="{sigheaders}",signature="{sig}"' # noqa: E501
logger.debug(f"signed request {sig_value=}")
r.headers["Signature"] = sig_value
yield r
k = Key(config.ID, f"{config.ID}#main-key")
2022-07-04 18:25:27 +00:00
k.load(KEY_PATH.read_text())
2022-06-22 18:11:22 +00:00
auth = HTTPXSigAuth(k)
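
# `auth` is meant to be passed to the httpx client used for outgoing
# ActivityPub requests, e.g. (illustrative):
#
#     async with httpx.AsyncClient(auth=auth) as client:
#         await client.post(inbox_url, json=payload)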