Refactor jsonld context management. Move LD signing to entities/activitypub. Move jsonld document_loader caching to __init__.py. Fix tests.

ld-signatures
Alain St-Denis 2023-03-24 18:34:15 -04:00
rodzic 913bc300df
commit 41785c2fdc
16 zmienionych plików z 462 dodań i 340 usunięć

Wyświetl plik

@ -23,7 +23,7 @@ arrow
freezegun
# Django support
django>=1.8,<2.3
django>=3.2,<4
pytest-django
# Releasing

Wyświetl plik

@ -0,0 +1,35 @@
import json
from datetime import timedelta

from pyld import jsonld

from federation.utils.django import get_redis

# Redis client when one is configured, otherwise a process-local dict.
cache = get_redis() or {}
# Remote contexts change rarely; cache them for four weeks.
EXPIRATION = int(timedelta(weeks=4).total_seconds())


# This is required to workaround a bug in pyld that has the Accept header
# accept other content types. From what I understand, precedence handling
# is broken
# from https://github.com/digitalbazaar/pyld/issues/133
# cacheing loosely inspired by https://github.com/digitalbazaar/pyld/issues/70
def get_loader(*args, **kwargs):
    """Return a caching JSON-LD document loader.

    Wraps pyld's requests-based loader so that fetched context documents
    are cached (in redis when available, else in-process) and the Accept
    header is forced to application/ld+json.
    """
    requests_loader = jsonld.requests_document_loader(*args, **kwargs)

    def loader(url, options=None):
        # Avoid the shared-mutable-default pitfall of `options={}`.
        if options is None:
            options = {}
        key = f'ld_cache:{url}'
        try:
            return json.loads(cache[key])
        except KeyError:
            # Ensure a headers dict exists before setting Accept; the
            # original assumed callers always passed one.
            options.setdefault('headers', {})['Accept'] = 'application/ld+json'
            doc = requests_loader(url, options)
            if isinstance(cache, dict):
                # Fix: store under the same key used for lookups above;
                # the original stored under the bare url, so the dict
                # cache never produced a hit.
                cache[key] = json.dumps(doc)
            else:
                cache.set(key, json.dumps(doc), ex=EXPIRATION)
            return doc

    return loader


jsonld.set_document_loader(get_loader())

Wyświetl plik

@ -1,22 +1,4 @@
# JSON-LD context URIs and term definitions shared by the ActivityPub entities.
CONTEXT_ACTIVITYSTREAMS = "https://www.w3.org/ns/activitystreams"
CONTEXT_LD_SIGNATURES = "https://w3id.org/security/v1"
CONTEXT_SECURITY = "https://w3id.org/security/v1"

CONTEXT_DIASPORA = {"diaspora": "https://diasporafoundation.org/ns/"}
CONTEXT_HASHTAG = {"Hashtag": "as:Hashtag"}
CONTEXT_MANUALLY_APPROVES_FOLLOWERS = {"manuallyApprovesFollowers": "as:manuallyApprovesFollowers"}
CONTEXT_PYTHON_FEDERATION = {"pyfed": "https://docs.jasonrobinson.me/ns/python-federation#"}
CONTEXT_SENSITIVE = {"sensitive": "as:sensitive"}

# Minimal outbound context.
CONTEXTS_DEFAULT = [
    CONTEXT_ACTIVITYSTREAMS,
    CONTEXT_PYTHON_FEDERATION,
]

# Force these AS2 properties to serialize as arrays ('@container': '@set').
CONTEXT_SETS = {
    prop: {'@id': f'as:{prop}', '@container': '@set'}
    for prop in ('to', 'cc', 'tag', 'attachment')
}

# All extra term definitions merged into a single dict.
CONTEXT_DICT = {
    **CONTEXT_DIASPORA,
    **CONTEXT_HASHTAG,
    **CONTEXT_MANUALLY_APPROVES_FOLLOWERS,
    **CONTEXT_SENSITIVE,
    **CONTEXT_PYTHON_FEDERATION,
    **CONTEXT_SETS,
}

# Full context: AS2 + LD signatures + merged term definitions.
CONTEXT = [CONTEXT_ACTIVITYSTREAMS, CONTEXT_LD_SIGNATURES, CONTEXT_DICT]

NAMESPACE_PUBLIC = "https://www.w3.org/ns/activitystreams#Public"

Wyświetl plik

@ -0,0 +1,151 @@
import copy
import json
from marshmallow import missing
from pyld import jsonld
from federation.entities.activitypub.constants import CONTEXT_ACTIVITYSTREAMS, CONTEXT_SECURITY, NAMESPACE_PUBLIC
# Extract context information from the metadata parameter defined for fields
# that are not part of the official AP spec. Use the same extended context for
# inbound payload. For outbound payload, build a context with only the required
# extensions
class LdContextManager:
    """Manage JSON-LD ``@context`` values for AP payloads.

    On construction, the given model classes are scanned for context
    declarations: a class-level ``ctx`` list and per-field
    ``metadata['ctx']`` lists. These are kept per class (used to build a
    minimal outbound context) and also merged into one full context
    (used to extend inbound payload contexts).
    """

    def __init__(self, models):
        # All state is per-instance. The original declared these as
        # class attributes, which are mutable and would be shared (and
        # mutated) across every instance of the class.
        self._named = [CONTEXT_ACTIVITYSTREAMS, CONTEXT_SECURITY]
        self._extensions = {}
        self._merged = []
        self._models = models
        for klass in models:
            self._extensions[klass] = {}
            ctx = getattr(klass, 'ctx', [])
            if ctx:
                self._extensions[klass].update({klass.__name__: ctx})
            for name, value in klass.schema().declared_fields.items():
                ctx = value.metadata.get('ctx') or []
                if ctx:
                    self._extensions[klass].update({name: ctx})
        merged = {}
        for field in self._extensions.values():
            for ctx in field.values():
                self._add_extensions(ctx, self._named, merged)
        self._merged = copy.copy(self._named)
        self._merged.append(merged)

    def _add_extensions(self, field, named, extensions):
        # Strings are named context URIs; dicts are inline term definitions.
        for item in field:
            if isinstance(item, str) and item not in named:
                named.append(item)
            elif isinstance(item, dict):
                extensions.update(item)

    def _get_fields(self, obj):
        # obj may be an instance of a subclass of a registered model.
        for klass in self._extensions.keys():
            if issubclass(type(obj), klass):
                return self._extensions[klass]
        return {}

    def compact(self, obj):
        """Compact the dumped object against a minimal outbound context."""
        payload = jsonld.compact(obj.dump(), self.build_context(obj))
        patched = copy.copy(payload)

        # This is for platforms that don't handle the single element array
        # compaction to a single value and https://www.w3.org/ns/activitystreams#Public
        # being compacted to as:Public
        def patch_payload(payload, patched):
            for field in ('attachment', 'cc', 'tag', 'to'):
                value = payload.get(field)
                if value and not isinstance(value, list):
                    value = [value]
                # NOTE(review): when the field is absent this still writes
                # a None entry into patched — presumably harmless for
                # serialization; confirm with consumers.
                patched[field] = value
                if field in ('cc', 'to'):
                    try:
                        idx = value.index('as:Public')
                        patched[field][idx] = value[idx].replace('as:Public', NAMESPACE_PUBLIC)
                    except (AttributeError, ValueError):
                        # value may be None (no .index) or simply not
                        # contain as:Public; narrowed from a bare except.
                        pass
            if isinstance(payload.get('object'), dict):
                patch_payload(payload['object'], patched['object'])

        patch_payload(payload, patched)
        return patched

    def build_context(self, obj):
        """Build the smallest context covering obj and its nested objects."""
        from federation.entities.activitypub.models import Object, Link
        final = [CONTEXT_ACTIVITYSTREAMS]
        extensions = {}

        def walk_object(obj):
            if type(obj) in self._extensions.keys():
                self._add_extensions(
                    self._extensions[type(obj)].get(type(obj).__name__, []),
                    final, extensions)
            to_add = self._get_fields(obj)
            for field in type(obj).schema().declared_fields.keys():
                field_value = getattr(obj, field)
                if field in to_add.keys():
                    # Signature context is added even before the field is
                    # set, since signable objects are signed after dump.
                    if field_value is not missing or obj.signable and field == 'signature':
                        self._add_extensions(to_add[field], final, extensions)
                if not isinstance(field_value, list):
                    field_value = [field_value]
                for value in field_value:
                    if issubclass(type(value), (Object, Link)):
                        walk_object(value)

        walk_object(obj)
        if extensions:
            final.append(extensions)
        # compact the array if len == 1 to minimize test changes
        return final if len(final) > 1 else final[0]

    def merge_context(self, ctx):
        """Normalize an inbound @context and merge in all known extensions."""
        # One platform sends a single string context
        if isinstance(ctx, str):
            ctx = [ctx]
        # add a # at the end of the python-federation string
        # for socialhome payloads
        s = json.dumps(ctx)
        if 'python-federation"' in s:
            ctx = json.loads(s.replace('python-federation', 'python-federation#', 1))
        # some platforms have http://joinmastodon.org/ns in @context. This
        # is not a json-ld document.
        try:
            ctx.pop(ctx.index('http://joinmastodon.org/ns'))
        except (AttributeError, ValueError):
            # ctx may be a bare dict (no .index / .pop(index)) or not
            # contain the URI; narrowed from a bare except.
            pass
        # remove @language in context since this directive is not
        # processed by calamus. Pleroma adds a useless @language: 'und'
        # which is discouraged in best practices and in some cases makes
        # calamus return dict where str is expected.
        # see https://www.rfc-editor.org/rfc/rfc5646, page 56
        idx = []
        for i, v in enumerate(ctx):
            if isinstance(v, dict):
                v.pop('@language', None)
                if len(v) == 0:
                    # collect indices in reverse so pops below don't shift
                    idx.insert(0, i)
        for i in idx:
            ctx.pop(i)
        # AP activities may be signed, but most platforms don't
        # define RsaSignature2017. add it to the context
        # hubzilla doesn't define the discoverable property in its context
        # include all Mastodon extensions for platforms that only define
        # http://joinmastodon.org/ns in their context
        uris = []
        defs = {}
        # Merge original context dicts in one dict
        for item in ctx:
            if isinstance(item, str):
                uris.append(item)
            else:
                defs.update(item)
        for item in self._merged:
            if isinstance(item, str) and item not in uris:
                uris.append(item)
            elif isinstance(item, dict):
                defs.update(item)
        final = copy.copy(uris)
        final.append(defs)
        return final

Wyświetl plik

@ -0,0 +1,101 @@
import datetime
import logging
import math
import re
from base64 import b64encode, b64decode
from copy import copy
from funcy import omit
from pyld import jsonld
from Crypto.Hash import SHA256
from Crypto.PublicKey.RSA import import_key
from Crypto.Signature import pkcs1_15
from federation.utils.activitypub import retrieve_and_parse_document
logger = logging.getLogger("federation")
def create_ld_signature(obj, author):
    """Add an RsaSignature2017 LD signature to ``obj`` in place.

    Builds a signature options document (created timestamp, creator key id),
    hashes both the options and the payload with URDNA2015+SHA256, signs the
    concatenated hex digests with the author's RSA key (PKCS#1 v1.5), and
    attaches the result under obj['signature'].

    Returns None (leaving obj unsigned) when the author's private key
    cannot be imported.
    """
    # Use models.Signature? Maybe overkill...
    sig = {
        'created': datetime.datetime.now(tz=datetime.timezone.utc).isoformat(timespec='seconds'),
        'creator': f'{author.id}#main-key',
        '@context':'https://w3id.org/security/v1'
    }
    try:
        private_key = import_key(author.private_key)
    except (ValueError, TypeError) as exc:
        logger.warning(f'ld_signature - {exc}')
        return None
    signer = pkcs1_15.new(private_key)
    # Digest the signature options and the payload separately, then sign
    # the concatenation of the two hex digests (matching verify_ld_signature).
    sig_digest = hash(sig)
    obj_digest = hash(obj)
    digest = (sig_digest + obj_digest).encode('utf-8')
    signature = signer.sign(SHA256.new(digest))
    sig.update({'type': 'RsaSignature2017', 'signatureValue': b64encode(signature).decode()})
    # @context was only needed to normalize the options document; drop it
    # from the embedded signature.
    sig.pop('@context')
    obj.update({'signature':sig})
def verify_ld_signature(payload):
    """
    Verify inbound payload LD signature.

    Fetches the creator's profile to obtain the public key, recomputes the
    URDNA2015+SHA256 digests of the signature options and of the payload
    (minus the signature), and checks the RSA PKCS#1 v1.5 signature.

    Returns the signing profile's id on success, None on any failure
    (missing signature, unreachable profile, bad key, invalid signature).
    """
    signature = copy(payload.get('signature', None))
    if not signature:
        logger.warning(f'ld_signature - No signature in {payload.get("id", "the payload")}')
        return None
    # retrieve the author's public key
    profile = retrieve_and_parse_document(signature.get('creator'))
    if not profile:
        logger.warning(f'ld_signature - Failed to retrieve profile for {signature.get("creator")}')
        return None
    try:
        pkey = import_key(profile.public_key)
    except ValueError as exc:
        logger.warning(f'ld_signature - {exc}')
        return None
    verifier = pkcs1_15.new(pkey)
    # Compute digests and verify signature
    # Rebuild the signature options document exactly as it was signed:
    # without type/signatureValue, with the security @context restored.
    sig = omit(signature, ('type', 'signatureValue'))
    sig.update({'@context':'https://w3id.org/security/v1'})
    sig_digest = hash(sig)
    obj = omit(payload, 'signature')
    obj_digest = hash(obj)
    digest = (sig_digest + obj_digest).encode('utf-8')
    sig_value = b64decode(signature.get('signatureValue'))
    try:
        verifier.verify(SHA256.new(digest), sig_value)
        logger.debug(f'ld_signature - {payload.get("id")} has a valid signature')
        return profile.id
    except ValueError:
        logger.warning(f'ld_signature - Invalid signature for {payload.get("id")}')
        return None
def hash(obj):
    """Return the SHA256 hex digest of obj's URDNA2015 normalization.

    NOTE: intentionally shadows the builtin hash(); kept for backward
    compatibility with existing imports.
    """
    normalized = NormalizedDoubles().normalize(
        obj, options={'format': 'application/nquads', 'algorithm': 'URDNA2015'})
    return SHA256.new(normalized.encode('utf-8')).hexdigest()
# We need this to ensure the digests are identical.
class NormalizedDoubles(jsonld.JsonLdProcessor):
def _object_to_rdf(self, item, issuer, triples, rdfDirection):
value = item['@value'] if jsonld._is_value(item) else None
# The ruby rdf_normalize library turns floats with a zero fraction into integers.
if isinstance(value, float) and value == math.floor(value):
item['@value'] = math.floor(value)
obj = super()._object_to_rdf(item, issuer, triples, rdfDirection)
# This is to address https://github.com/digitalbazaar/pyld/issues/175
if obj.get('datatype') == jsonld.XSD_DOUBLE:
obj['value'] = re.sub(r'(\d)0*E\+?(-)?0*(\d)', r'\1E\2\3', obj['value'])
return obj

Wyświetl plik

@ -84,6 +84,6 @@ def message_to_objects(
Takes in a message extracted by a protocol and maps it to entities.
"""
# We only really expect one element here for ActivityPub.
return element_to_objects(message)
return element_to_objects(message, sender)

Wyświetl plik

@ -1,7 +1,7 @@
import copy
import json
import logging
import uuid
from copy import copy
from datetime import timedelta
from typing import List, Dict, Union
from urllib.parse import urlparse
@ -16,8 +16,9 @@ from marshmallow.utils import EXCLUDE, missing
from pyld import jsonld
import federation.entities.base as base
import federation.utils.jsonld_helper
from federation.entities.activitypub.constants import CONTEXT, CONTEXT_SETS, NAMESPACE_PUBLIC
from federation.entities.activitypub.constants import CONTEXT_ACTIVITYSTREAMS, CONTEXT_SECURITY, NAMESPACE_PUBLIC
from federation.entities.activitypub.ldcontext import LdContextManager
from federation.entities.activitypub.ldsigning import create_ld_signature, verify_ld_signature
from federation.entities.mixins import BaseEntity, RawContentMixin
from federation.entities.utils import get_base_attributes, get_profile
from federation.outbound import handle_send
@ -108,7 +109,7 @@ class NormalizedList(fields.List):
# Don't want expanded IRIs to be exposed as dict keys
class CompactedDict(fields.Dict):
ctx = ["https://www.w3.org/ns/activitystreams", "https://w3id.org/security/v1"]
ctx = [CONTEXT_ACTIVITYSTREAMS, CONTEXT_SECURITY]
# may or may not be needed
def _serialize(self, value, attr, obj, **kwargs):
@ -133,12 +134,12 @@ class CompactedDict(fields.Dict):
# calamus sets a XMLSchema#integer type, but different definitions
# maybe used, hence the flavor property
# TODO: handle non negative types
class Integer(fields._JsonLDField, Integer):
class MixedInteger(fields._JsonLDField, Integer):
flavor = None # add fields.IRIReference type hint
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.flavor = kwargs.get('flavor')
self.flavor = self.metadata.get('flavor')
def _serialize(self, value, attr, obj, **kwargs):
value = super()._serialize(value, attr, obj, **kwargs)
@ -227,7 +228,6 @@ OBJECTS = [
'VideoSchema'
]
def set_public(entity):
for attr in [entity.to, entity.cc]:
if isinstance(attr, list):
@ -235,38 +235,31 @@ def set_public(entity):
elif attr == NAMESPACE_PUBLIC: entity.public = True
def add_props_to_attrs(obj, props):
return obj.__dict__
attrs = copy(obj.__dict__)
for prop in props:
attrs.update({prop: getattr(obj, prop, None)})
attrs.pop('_'+prop, None)
attrs.update({'schema': True})
return attrs
class Object(BaseEntity, metaclass=JsonLDAnnotation):
atom_url = fields.String(ostatus.atomUri)
also_known_as = IRI(as2.alsoKnownAs)
also_known_as = IRI(as2.alsoKnownAs,
metadata={'ctx':[{ 'alsoKnownAs':{'@id':'as:alsoKnownAs','@type':'@id'}}]})
icon = MixedField(as2.icon, nested='ImageSchema')
image = MixedField(as2.image, nested='ImageSchema', default='')
image = MixedField(as2.image, nested='ImageSchema')
tag_objects = MixedField(as2.tag, nested=['HashtagSchema','MentionSchema','PropertyValueSchema','EmojiSchema'], many=True)
attachment = fields.Nested(as2.attachment, nested=['ImageSchema', 'AudioSchema', 'DocumentSchema','PropertyValueSchema','IdentityProofSchema'], many=True)
attachment = fields.Nested(as2.attachment, nested=['ImageSchema', 'AudioSchema', 'DocumentSchema','PropertyValueSchema','IdentityProofSchema'],
many=True, default=[])
content_map = LanguageMap(as2.content) # language maps are not implemented in calamus
context = IRI(as2.context)
guid = fields.String(diaspora.guid, default='')
handle = fields.String(diaspora.handle, default='')
context = fields.RawJsonLD(as2.context)
name = fields.String(as2.name, default='')
generator = MixedField(as2.generator, nested=['ApplicationSchema','ServiceSchema'])
created_at = fields.DateTime(as2.published, add_value_types=True)
replies = MixedField(as2.replies, nested=['CollectionSchema','OrderedCollectionSchema'])
signature = MixedField(sec.signature, nested = 'SignatureSchema')
signature = MixedField(sec.signature, nested = 'SignatureSchema',
metadata={'ctx': [CONTEXT_SECURITY,
{'RsaSignature2017':'sec:RsaSignature2017'}]})
start_time = fields.DateTime(as2.startTime, add_value_types=True)
updated = fields.DateTime(as2.updated, add_value_types=True)
to = fields.List(as2.to, cls_or_instance=fields.String(as2.to))
cc = fields.List(as2.cc, cls_or_instance=fields.String(as2.cc))
to = fields.List(as2.to, cls_or_instance=IRI(as2.to), default=[])
cc = fields.List(as2.cc, cls_or_instance=IRI(as2.cc), default=[])
media_type = fields.String(as2.mediaType)
source = CompactedDict(as2.source)
signable = False
# The following properties are defined by some platforms, but are not implemented yet
#audience
@ -279,7 +272,12 @@ class Object(BaseEntity, metaclass=JsonLDAnnotation):
def to_as2(self):
obj = self.activity if isinstance(self.activity, Activity) else self
return jsonld.compact(obj.dump(), CONTEXT)
return context_manager.compact(obj)
def sign_as2(self, sender=None):
obj = self.to_as2()
if self.signable and sender: create_ld_signature(obj, sender)
return obj
@classmethod
def from_base(cls, entity):
@ -315,86 +313,8 @@ class Object(BaseEntity, metaclass=JsonLDAnnotation):
@pre_load
def patch_context(self, data, **kwargs):
if not data.get('@context'): return data
ctx = copy(data['@context'])
# One platform send a single string context
if isinstance(ctx, str): ctx = [ctx]
# add a # at the end of the python-federation string
# for socialhome payloads
s = json.dumps(ctx)
if 'python-federation"' in s:
ctx = json.loads(s.replace('python-federation', 'python-federation#', 1))
# some paltforms have http://joinmastodon.com/ns in @context. This
# is not a json-ld document.
try:
ctx.pop(ctx.index('http://joinmastodon.org/ns'))
except:
pass
# remove @language in context since this directive is not
# processed by calamus. Pleroma adds a useless @language: 'und'
# which is discouraged in best practices and in some cases makes
# calamus return dict where str is expected.
# see https://www.rfc-editor.org/rfc/rfc5646, page 56
idx = []
for i,v in enumerate(ctx):
if isinstance(v, dict):
v.pop('@language',None)
if len(v) == 0: idx.insert(0, i)
for i in idx: ctx.pop(i)
# AP activities may be signed, but most platforms don't
# define RsaSignature2017. add it to the context
# hubzilla doesn't define the discoverable property in its context
# include all Mastodon extensions for platforms that only define http://joinmastodon.org/ns in their context
may_add = {'signature': ['https://w3id.org/security/v1', {'sec':'https://w3id.org/security#','RsaSignature2017':'sec:RsaSignature2017'}],
'publicKey': ['https://w3id.org/security/v1'],
'discoverable': [{'toot':'http://joinmastodon.org/ns#','discoverable': 'toot:discoverable'}], #for hubzilla
'suspended': [{'toot':'http://joinmastodon.org/ns#','suspended': 'toot:suspended'}],
'copiedTo': [{'toot':'http://joinmastodon.org/ns#','copiedTo': 'toot:copiedTo'}], #for hubzilla
'featured': [{'toot':'http://joinmastodon.org/ns#','featured': 'toot:featured'}], #for litepub and pleroma
'featuredTags': [{'toot':'http://joinmastodon.org/ns#','featuredTags': 'toot:featuredTags'}],
'focalPoint': [{'toot':'http://joinmastodon.org/ns#',
'focalPoint': {'@id':'toot:focalPoint','@container':'@list'},
}],
'tag': [{'Hashtag': 'as:Hashtag', #for epicyon
'toot':'http://joinmastodon.org/ns#',
'Emoji':'toot:Emoji'}],
'attachment': [{'schema': 'http://schema.org#', 'PropertyValue': 'schema:PropertyValue', # for owncast
'toot':'http://joinmastodon.org/ns#','blurHash': 'toot:blurHash',
'IdentityProof': 'toot:IdentityProof'}]
}
to_add = [val for key,val in may_add.items() if data.get(key)]
if to_add:
idx = [i for i,v in enumerate(ctx) if isinstance(v, dict)]
if idx:
upd = ctx[idx[0]]
# merge context dicts
if len(idx) > 1:
idx.reverse()
for i in idx[:-1]:
upd.update(ctx[i])
ctx.pop(i)
else:
upd = {}
for add in to_add:
for val in add:
if isinstance(val, str) and val not in ctx:
try:
ctx.append(val)
except AttributeError:
ctx = [ctx, val]
if isinstance(val, dict):
upd.update(val)
if not idx and upd: ctx.append(upd)
# for to and cc fields to be processed as strings
ctx.append(CONTEXT_SETS)
data['@context'] = ctx
ctx = copy.copy(data['@context'])
data['@context'] = context_manager.merge_context(ctx)
return data
# A node without an id isn't true json-ld, but many payloads have
@ -424,7 +344,7 @@ class Collection(Object, base.Collection):
first = MixedField(as2.first, nested=['CollectionPageSchema', 'OrderedCollectionPageSchema'])
current = IRI(as2.current)
last = IRI(as2.last)
total_items = Integer(as2.totalItems, flavor=xsd.nonNegativeInteger, add_value_types=True)
total_items = MixedInteger(as2.totalItems, metadata={'flavor':xsd.nonNegativeInteger}, add_value_types=True)
class Meta:
rdf_type = as2.Collection
@ -447,7 +367,7 @@ class CollectionPage(Collection):
class OrderedCollectionPage(OrderedCollection, CollectionPage):
start_index = Integer(as2.startIndex, flavor=xsd.nonNegativeInteger, add_value_types=True)
start_index = MixedInteger(as2.startIndex, metadata={'flavor':xsd.nonNegativeInteger}, add_value_types=True)
class Meta:
rdf_type = as2.OrderedCollectionPage
@ -457,10 +377,12 @@ class OrderedCollectionPage(OrderedCollection, CollectionPage):
# AP defines [Ii]mage and [Aa]udio objects/properties, but only a Video object
# seen with Peertube payloads only so far
class Document(Object):
inline = fields.Boolean(pyfed.inlineImage, default=False)
height = Integer(as2.height, default=0, flavor=xsd.nonNegativeInteger, add_value_types=True)
width = Integer(as2.width, default=0, flavor=xsd.nonNegativeInteger, add_value_types=True)
blurhash = fields.String(toot.blurhash)
inline = fields.Boolean(pyfed.inlineImage, default=False,
metadata={'ctx':[{'pyfed':str(pyfed)}]})
height = MixedInteger(as2.height, default=0, metadata={'flavor':xsd.nonNegativeInteger}, add_value_types=True)
width = MixedInteger(as2.width, default=0, metadata={'flavor':xsd.nonNegativeInteger}, add_value_types=True)
blurhash = fields.String(toot.blurHash,
metadata={'ctx':[{'toot':str(toot),'blurHash':'toot:blurHash'}]})
url = MixedField(as2.url, nested='LinkSchema')
def to_base(self):
@ -507,10 +429,10 @@ class Link(metaclass=JsonLDAnnotation):
media_type = fields.String(as2.mediaType)
name = fields.String(as2.name)
href_lang = fields.String(as2.hrefLang)
height = Integer(as2.height, flavor=xsd.nonNegativeInteger, add_value_types=True)
width = Integer(as2.width, flavor=xsd.nonNegativeInteger, add_value_types=True)
fps = Integer(pt.fps, flavor=schema.Number, add_value_types=True)
size = Integer(pt.size, flavor=schema.Number, add_value_types=True)
height = MixedInteger(as2.height, metadata={'flavor':xsd.nonNegativeInteger}, add_value_types=True)
width = MixedInteger(as2.width, metadata={'flavor':xsd.nonNegativeInteger}, add_value_types=True)
fps = MixedInteger(pt.fps, metadata={'flavor':schema.Number}, add_value_types=True)
size = MixedInteger(pt.size, metadata={'flavor':schema.Number}, add_value_types=True)
tag = MixedField(as2.tag, nested=['InfohashSchema', 'LinkSchema'], many=True)
# Not implemented yet
#preview : variable type?
@ -534,6 +456,7 @@ class Link(metaclass=JsonLDAnnotation):
class Hashtag(Link):
ctx = [{'Hashtag': 'as:Hashtag'}]
class Meta:
rdf_type = as2.Hashtag
@ -546,7 +469,9 @@ class Mention(Link):
class PropertyValue(Object):
value = fields.String(schema.value)
value = fields.String(schema.value,
metadata={'ctx':[{'schema':str(schema),'value':'schema:value'}]})
ctx = [{'schema':str(schema),'PropertyValue':'schema:PropertyValue'}]
class Meta:
rdf_type = schema.PropertyValue
@ -555,12 +480,14 @@ class PropertyValue(Object):
class IdentityProof(Object):
signature_value = fields.String(sec.signatureValue)
signing_algorithm = fields.String(sec.signingAlgorithm)
ctx = [CONTEXT_SECURITY]
class Meta:
rdf_type = toot.IdentityProof
class Emoji(Object):
ctx = [{'toot':'http://joinmastodon.org/ns#','Emoji':'toot:Emoji'}]
class Meta:
rdf_type = toot.Emoji
@ -572,26 +499,40 @@ class Person(Object, base.Profile):
outbox = IRI(as2.outbox)
following = IRI(as2.following)
followers = IRI(as2.followers)
guid = fields.String(diaspora.guid, metadata={'ctx':[{'diaspora':str(diaspora)}]})
handle = fields.String(diaspora.handle, metadata={'ctx':[{'diaspora':str(diaspora)}]})
username = fields.String(as2.preferredUsername)
endpoints = CompactedDict(as2.endpoints)
shared_inbox = IRI(as2.sharedInbox) # misskey adds this
url = MixedField(as2.url, nested='LinkSchema')
playlists = IRI(pt.playlists)
featured = IRI(toot.featured)
featuredTags = IRI(toot.featuredTags)
manuallyApprovesFollowers = fields.Boolean(as2.manuallyApprovesFollowers, default=False)
discoverable = fields.Boolean(toot.discoverable)
featured = IRI(toot.featured,
metadata={'ctx':[{'toot':str(toot),
'featured': {'@id':'toot:featured','@type':'@id'}}]})
featured_tags = IRI(toot.featuredTags,
metadata={'ctx':[{'toot':str(toot),
'featuredTags': {'@id':'toot:featuredTags','@type':'@id'}}]})
manually_approves_followers = fields.Boolean(as2.manuallyApprovesFollowers, default=False,
metadata={'ctx':[{'manuallyApprovesFollowers':'as:manuallyApprovesFollowers'}]})
discoverable = fields.Boolean(toot.discoverable,
metadata={'ctx':[{'toot':str(toot),
'discoverable': 'toot:discoverable'}]})
devices = IRI(toot.devices)
public_key_dict = CompactedDict(sec.publicKey)
raw_content = fields.String(as2.summary, default="")
public_key_dict = CompactedDict(sec.publicKey,
metadata={'ctx':[CONTEXT_SECURITY]})
raw_content = fields.String(as2.summary, default='')
has_address = MixedField(vcard.hasAddress, nested='HomeSchema')
has_instant_message = fields.List(vcard.hasInstantMessage, cls_or_instance=fields.String)
address = fields.String(vcard.Address)
is_cat = fields.Boolean(misskey.isCat)
moved_to = IRI(as2.movedTo)
copied_to = IRI(toot.copiedTo)
moved_to = IRI(as2.movedTo,
metadata={'ctx':[{'movedTo':{'@id':'as:movedTo','@type':'@id'}}]})
copied_to = IRI(as2.copiedTo,
metadata={'ctx':[{'copiedTo':{'@id':'as:copiedTo','@type':'@id'}}]})
capabilities = CompactedDict(litepub.capabilities)
suspended = fields.Boolean(toot.suspended)
suspended = fields.Boolean(toot.suspended,
metadata={'ctx':[{'toot':str(toot),
'suspended': 'toot:suspended'}]})
public = True
_cached_inboxes = None
_cached_public_key = None
@ -622,7 +563,7 @@ class Person(Object, base.Profile):
if get_profile_id_from_webfinger(finger) == self.id:
self.finger = finger
# multi-protocol platform
if self.finger and self.guid and not self.handle:
if self.finger and self.guid is not missing and self.handle is missing:
self.handle = self.finger
def to_as2(self):
@ -678,10 +619,11 @@ class Person(Object, base.Profile):
def image_urls(self):
if getattr(self, 'icon', None):
icon = self.icon if not isinstance(self.icon, list) else self.icon[0]
url = icon if isinstance(icon, str) else icon.url
self._cached_image_urls = {
'small': icon.url,
'medium': icon.url,
'large': icon.url
'small': url,
'medium': url,
'large': url
}
return self._cached_image_urls
@ -737,14 +679,22 @@ class Note(Object, RawContentMixin):
id = fields.Id()
actor_id = IRI(as2.attributedTo)
target_id = IRI(as2.inReplyTo, default=None)
conversation = fields.RawJsonLD(ostatus.conversation)
conversation = fields.RawJsonLD(ostatus.conversation,
metadata={'ctx':[{'ostatus':str(ostatus),
'conversation':'ostatus:conversation'}]})
entity_type = 'Post'
in_reply_to_atom_uri = IRI(ostatus.inReplyToAtomUri)
sensitive = fields.Boolean(as2.sensitive, default=False)
guid = fields.String(diaspora.guid, metadata={'ctx':[{'diaspora':str(diaspora)}]})
in_reply_to_atom_uri = IRI(ostatus.inReplyToAtomUri,
metadata={'ctx':[{'ostatus':str(ostatus),
'inReplyToAtomUri':'ostatus:inReplyToAtomUri'}]})
sensitive = fields.Boolean(as2.sensitive, default=False,
metadata={'ctx':[{'sensitive':'as:sensitive'}]})
summary = fields.String(as2.summary)
url = IRI(as2.url)
_cached_raw_content = ''
_cached_children = []
signable = True
def __init__(self, *args, **kwargs):
self.tag_objects = [] # mutable objects...
@ -752,7 +702,8 @@ class Note(Object, RawContentMixin):
self._allowed_children += (base.Audio, base.Video)
def to_as2(self):
self.sensitive = 'nsfw' in self.tags
#self.sensitive = 'nsfw' in self.tags
self.url = self.id
edited = False
if hasattr(self, 'times'):
@ -779,7 +730,8 @@ class Note(Object, RawContentMixin):
def to_base(self):
kwargs = get_base_attributes(self, keep=(
'_mentions', '_media_type', '_rendered_content', '_cached_children', '_cached_raw_content'))
'_mentions', '_media_type', '_rendered_content', '_source_object',
'_cached_children', '_cached_raw_content'))
entity = Comment(**kwargs) if getattr(self, 'target_id') else Post(**kwargs)
# Plume (and maybe other platforms) send the attrbutedTo field as an array
if isinstance(entity.actor_id, list): entity.actor_id = entity.actor_id[0]
@ -958,13 +910,11 @@ class Note(Object, RawContentMixin):
class Meta:
rdf_type = as2.Note
exclude = ('handle',)
class Post(Note, base.Post):
class Meta:
rdf_type = as2.Note
exclude = ('handle',)
class Comment(Note, base.Comment):
@ -978,7 +928,6 @@ class Comment(Note, base.Comment):
class Meta:
rdf_type = as2.Note
exclude = ('handle',)
class Article(Note):
@ -996,6 +945,7 @@ class Video(Document, base.Video):
id = fields.Id()
actor_id = MixedField(as2.attributedTo, nested=['PersonSchema', 'GroupSchema'], many=True)
url = MixedField(as2.url, nested='LinkSchema')
signable = True
class Meta:
unknown = EXCLUDE # required until all the pt fields are defined
@ -1031,7 +981,8 @@ class Video(Document, base.Video):
self.actor_id = new_act[0]
entity = Post(**get_base_attributes(self,
keep=('_mentions', '_media_type', '_rendered_content', '_cached_children', '_cached_raw_content')))
keep=('_mentions', '_media_type', '_rendered_content',
'_cached_children', '_cached_raw_content', '_source_object')))
set_public(entity)
return entity
#Some Video object
@ -1141,7 +1092,7 @@ class Follow(Activity, base.Follow):
class Meta:
rdf_type = as2.Follow
exclude = ('created_at', 'handle')
exclude = ('created_at',)
class Announce(Activity, base.Share):
@ -1176,14 +1127,20 @@ class Announce(Activity, base.Share):
self.target_id = self.id
self.entity_type = 'Object'
self.__dict__.update({'schema': True})
entity = base.Retraction(**get_base_attributes(self))
entity = Retraction(**get_base_attributes(self, keep=('_source_object',)))
set_public(entity)
return entity
class Meta:
rdf_type = as2.Announce
# Only used for inbound share retraction (undo announce)
class Retraction(Announce, base.Retraction):
class Meta:
rdf_type = as2.Announce
class Tombstone(Object, base.Retraction):
target_id = fields.Id()
@ -1354,7 +1311,7 @@ def extract_replies(replies):
return objs
def element_to_objects(element: Union[Dict, Object]) -> List:
def element_to_objects(element: Union[Dict, Object], sender: str = "") -> List:
"""
Transform an Element to a list of entities.
"""
@ -1372,7 +1329,14 @@ def element_to_objects(element: Union[Dict, Object]) -> List:
extract_and_validate(entity)
except ValueError as ex:
logger.error("Failed to validate entity %s: %s", entity, ex)
return None
return []
# Always verify the LD signature, for monitoring purposes
actor = verify_ld_signature(entity._source_object)
if entity.signable and sender not in (entity.id, getattr(entity, 'actor_id', None), ''):
# Relayed payload
if not actor:
logger.warning(f'no or invalid signature for a relayed payload, fetching {entity.id}')
entity = retrieve_and_parse_document(entity.id)
logger.info('Entity type "%s" was handled through the json-ld processor', entity.__class__.__name__)
return [entity]
elif entity:
@ -1385,11 +1349,11 @@ def element_to_objects(element: Union[Dict, Object]) -> List:
def model_to_objects(payload):
original_payload = copy.copy(payload)
model = globals().get(payload.get('type'))
if model and issubclass(model, Object):
try:
entity = model.schema().load(payload)
entity._source_object = payload
except (KeyError, jsonld.JsonLdError, exceptions.ValidationError) as exc : # Just give up for now. This must be made robust
logger.error(f"Error parsing jsonld payload ({exc})")
return None
@ -1397,6 +1361,19 @@ def model_to_objects(payload):
if isinstance(getattr(entity, 'object_', None), Object):
entity.object_.activity = entity
entity = entity.object_
entity._source_object = original_payload
return entity
return None
CLASSES_WITH_CONTEXT_EXTENSIONS = (
Document,
Emoji,
Hashtag,
IdentityProof,
Note,
Person,
PropertyValue
)
context_manager = LdContextManager(CLASSES_WITH_CONTEXT_EXTENSIONS)

Wyświetl plik

@ -5,6 +5,7 @@ import warnings
from typing import List, Set, Union, Dict, Tuple
from commonmark import commonmark
from marshmallow import missing
from federation.entities.activitypub.enums import ActivityType
from federation.entities.utils import get_name_for_profile, get_profile
@ -100,7 +101,7 @@ class BaseEntity:
def _validate_required(self, attributes):
"""Ensure required attributes are present."""
required_fulfilled = set(self._required).issubset(set(attributes))
if not required_fulfilled:
if not required_fulfilled or required_fulfilled is missing:
raise ValueError(
"Not all required attributes fulfilled. Required: {required}".format(required=set(self._required))
)
@ -115,7 +116,7 @@ class BaseEntity:
attrs_to_check = set(self._required) & set(attributes)
for attr in attrs_to_check:
value = getattr(self, attr) # We should always have a value here
if value is None or value == "":
if value is None or value == "" or value is missing:
raise ValueError(
"Attribute %s cannot be None or an empty string since it is required." % attr
)

Wyświetl plik

@ -15,7 +15,9 @@ def get_base_attributes(entity, keep=()):
cls = entity.__class__
for attr, _ in inspect.getmembers(cls, lambda o: not isinstance(o, property) and not inspect.isroutine(o)):
if not attr.startswith("_") or attr in keep:
attributes[attr] = getattr(entity, attr)
value = getattr(entity, attr)
if value or isinstance(value, bool):
attributes[attr] = value
return attributes

Wyświetl plik

@ -8,7 +8,7 @@ from Crypto.PublicKey.RSA import RsaKey
from federation.entities.activitypub.enums import ActorType
from federation.entities.mixins import BaseEntity
from federation.protocols.activitypub.signing import verify_request_signature, verify_ld_signature, create_ld_signature
from federation.protocols.activitypub.signing import verify_request_signature
from federation.types import UserType, RequestType
from federation.utils.text import decode_if_bytes
@ -43,6 +43,7 @@ class Protocol:
get_contact_key = None
payload = None
request = None
sender = None
user = None
def build_send(self, entity: BaseEntity, from_user: UserType, to_user_key: RsaKey = None) -> Union[str, Dict]:
@ -58,8 +59,7 @@ class Protocol:
# Use pregenerated outbound document
rendered = entity.outbound_doc
else:
rendered = entity.to_as2()
create_ld_signature(rendered, from_user)
rendered = entity.sign_as2(sender=from_user)
return rendered
def extract_actor(self):
@ -87,14 +87,9 @@ class Protocol:
# Verify the message is from who it claims to be
if not skip_author_verification:
try:
self.verify_signature()
# Verify the HTTP signature
self.sender = verify_request_signature(self.request)
except (ValueError, KeyError, InvalidSignature) as exc:
logger.warning(f'Signature verification failed: {exc}')
logger.warning(f'HTTP signature verification failed: {exc}')
return self.actor, {}
return self.actor, self.payload
def verify_signature(self):
# Verify the HTTP signature
self.actor = verify_request_signature(self.request)
# Verify the LD signature (not currently enforced)
verify_ld_signature(self.payload)
return self.sender, self.payload

Wyświetl plik

@ -5,24 +5,14 @@ https://funkwhale.audio/
"""
import datetime
import logging
import math
import re
from base64 import b64encode, b64decode
from copy import copy
from funcy import omit
from pyld import jsonld
from typing import Union
from urllib.parse import urlsplit
import pytz
from Crypto.Hash import SHA256
from Crypto.PublicKey.RSA import RsaKey, import_key
from Crypto.Signature import pkcs1_15
from Crypto.PublicKey.RSA import RsaKey
from httpsig.sign_algorithms import PSS
from httpsig.requests_auth import HTTPSignatureAuth
from httpsig.verify import HeaderVerifier
import federation.utils.jsonld_helper
from federation.types import RequestType
from federation.utils.network import parse_http_date
from federation.utils.text import encode_if_text
@ -91,87 +81,7 @@ def verify_request_signature(request: RequestType, required: bool=True):
return signer.id
def create_ld_signature(obj, author):
    """Attach an RsaSignature2017 JSON-LD signature to ``obj`` in place.

    The signature options and the document are each normalized (URDNA2015,
    N-Quads), SHA-256 hashed, and the concatenated hex digests are signed
    with the author's RSA private key using PKCS#1 v1.5.

    :param obj: outbound AS2 payload (dict); gains a ``signature`` member
        on success.
    :param author: object providing ``id`` and ``private_key`` (PEM text).
    :returns: None. If the private key cannot be imported the payload is
        left unsigned.
    """
    # Use models.Signature? Maybe overkill...
    sig = {
        'created': datetime.datetime.now(tz=datetime.timezone.utc).isoformat(timespec='seconds'),
        'creator': f'{author.id}#main-key',
        '@context': 'https://w3id.org/security/v1'
    }
    try:
        private_key = import_key(author.private_key)
    except (ValueError, TypeError) as exc:
        logger.warning(f'ld_signature - {exc}')
        return None
    signer = pkcs1_15.new(private_key)
    # Hash the signature options and the document separately, then sign
    # the SHA-256 of the concatenated hex digests.
    sig_nquads = normalize(sig, options={'format': 'application/nquads', 'algorithm': 'URDNA2015'}).encode('utf-8')
    sig_digest = SHA256.new(sig_nquads).hexdigest()
    obj_nquads = normalize(obj, options={'format': 'application/nquads', 'algorithm': 'URDNA2015'}).encode('utf-8')
    obj_digest = SHA256.new(obj_nquads).hexdigest()
    digest = (sig_digest + obj_digest).encode('utf-8')
    signature = signer.sign(SHA256.new(digest))
    # @context was only needed for normalization; drop it before embedding
    # the signature block into the payload.
    sig.update({'type': 'RsaSignature2017', 'signatureValue': b64encode(signature).decode()})
    sig.pop('@context')
    obj.update({'signature': sig})
def verify_ld_signature(payload):
    """Verify the inbound payload's RsaSignature2017 LD signature.

    Mirrors ``create_ld_signature``: the signature options (minus ``type``
    and ``signatureValue``, plus the security context) and the payload
    (minus ``signature``) are URDNA2015-normalized, SHA-256 hashed, and the
    concatenated digests are checked against the creator's public key.

    :param payload: inbound AS2 payload (dict).
    :returns: the signing profile on success, ``None`` when the payload is
        unsigned, the creator cannot be resolved, or the signature is
        invalid. (Previously the function returned ``None`` implicitly on
        both success and failure, making the outcome unobservable.)
    """
    signature = copy(payload.get('signature'))
    if not signature:
        logger.warning('ld_signature - No LD signature in the payload')
        return None

    # Retrieve the author's public key.
    from federation.utils.activitypub import retrieve_and_parse_document
    profile = retrieve_and_parse_document(signature.get('creator'))
    if not profile:
        logger.warning(f'ld_signature - Failed to retrieve profile for {signature.get("creator")}')
        return None
    try:
        pkey = import_key(profile.public_key)
    except ValueError as exc:
        logger.warning(f'ld_signature - {exc}')
        return None
    verifier = pkcs1_15.new(pkey)

    # Compute digests and verify the signature.
    sig = omit(signature, ('type', 'signatureValue'))
    sig.update({'@context': 'https://w3id.org/security/v1'})
    sig_nquads = normalize(sig, options={'format': 'application/nquads', 'algorithm': 'URDNA2015'}).encode('utf-8')
    sig_digest = SHA256.new(sig_nquads).hexdigest()
    obj = omit(payload, 'signature')
    obj_nquads = normalize(obj, options={'format': 'application/nquads', 'algorithm': 'URDNA2015'}).encode('utf-8')
    obj_digest = SHA256.new(obj_nquads).hexdigest()
    digest = (sig_digest + obj_digest).encode('utf-8')

    sig_value = b64decode(signature.get('signatureValue'))
    try:
        verifier.verify(SHA256.new(digest), sig_value)
    except ValueError:
        logger.warning(f'ld_signature - invalid signature for {payload.get("id")}')
        return None
    logger.debug(f'ld_signature - {payload.get("id")} has a valid signature')
    return profile
# We need this to ensure the digests are identical.
def normalize(document, options):
    """URDNA2015-normalize *document* via the patched processor.

    Routed through ``NormalizedDoubles`` instead of the stock
    ``jsonld.normalize`` so that both the signing and the verifying side
    serialize numeric values identically.
    """
    processor = NormalizedDoubles()
    return processor.normalize(document, options)
class NormalizedDoubles(jsonld.JsonLdProcessor):
    """JsonLdProcessor variant whose RDF number serialization matches the
    ruby rdf_normalize library, so signature digests interoperate across
    implementations."""

    def _object_to_rdf(self, item, issuer, triples, rdfDirection):
        # Only @value nodes carry a literal value we may need to adjust.
        value = item['@value'] if jsonld._is_value(item) else None
        # The ruby rdf_normalize library turns floats with a zero fraction to integers.
        if isinstance(value, float) and value == math.floor(value):
            item['@value'] = math.floor(value)
        obj = super()._object_to_rdf(item, issuer, triples, rdfDirection)
        # This is to address https://github.com/digitalbazaar/pyld/issues/175
        # (strip zero-padding from the exponent of xsd:double literals).
        if obj.get('datatype') == jsonld.XSD_DOUBLE:
            obj['value'] = re.sub(r'(\d)0*E\+?(-)?0*(\d)', r'\1E\2\3', obj['value'])
        return obj

Wyświetl plik

@ -5,7 +5,7 @@ from pprint import pprint
# noinspection PyPackageRequirements
from Crypto.PublicKey.RSA import RsaKey
from federation.entities.activitypub.constants import CONTEXT
from federation.entities.activitypub.models import context_manager
from federation.entities.activitypub.models import Accept
from federation.tests.fixtures.keys import PUBKEY
from federation.types import UserType
@ -15,7 +15,7 @@ class TestEntitiesConvertToAS2:
def test_accept_to_as2(self, activitypubaccept):
result = activitypubaccept.to_as2()
assert result == {
"@context": CONTEXT,
"@context": context_manager.build_context(activitypubaccept),
"id": "https://localhost/accept",
"type": "Accept",
"actor": "https://localhost/profile",
@ -30,7 +30,7 @@ class TestEntitiesConvertToAS2:
def test_announce_to_as2(self, activitypubannounce):
result = activitypubannounce.to_as2()
assert result == {
"@context": CONTEXT,
"@context": context_manager.build_context(activitypubannounce),
"id": "http://127.0.0.1:8000/post/123456/#create",
"type": "Announce",
"actor": "http://127.0.0.1:8000/profile/123456/",
@ -42,13 +42,14 @@ class TestEntitiesConvertToAS2:
activitypubcomment.pre_send()
result = activitypubcomment.to_as2()
assert result == {
'@context': CONTEXT,
'@context': context_manager.build_context(activitypubcomment),
'type': 'Create',
'id': 'http://127.0.0.1:8000/post/123456/#create',
'actor': 'http://127.0.0.1:8000/profile/123456/',
'object': {
'id': 'http://127.0.0.1:8000/post/123456/',
'type': 'Note',
'url': 'http://127.0.0.1:8000/post/123456/',
'attributedTo': 'http://127.0.0.1:8000/profile/123456/',
'content': '<p>raw_content</p>',
'published': '2019-04-27T00:00:00',
@ -67,13 +68,14 @@ class TestEntitiesConvertToAS2:
activitypubcomment.pre_send()
result = activitypubcomment.to_as2()
assert result == {
'@context': CONTEXT,
'@context': context_manager.build_context(activitypubcomment),
'type': 'Create',
'id': 'http://127.0.0.1:8000/post/123456/#create',
'actor': 'http://127.0.0.1:8000/profile/123456/',
'object': {
'id': 'http://127.0.0.1:8000/post/123456/',
'type': 'Note',
'url': 'http://127.0.0.1:8000/post/123456/',
'attributedTo': 'http://127.0.0.1:8000/profile/123456/',
'content': '<p>raw_content <a href="http://example.com" rel="nofollow" target="_blank">'
'http://example.com</a></p>',
@ -91,7 +93,7 @@ class TestEntitiesConvertToAS2:
def test_follow_to_as2(self, activitypubfollow):
result = activitypubfollow.to_as2()
assert result == {
"@context": CONTEXT,
"@context": context_manager.build_context(activitypubfollow),
"id": "https://localhost/follow",
"type": "Follow",
"actor": "https://localhost/profile",
@ -103,7 +105,7 @@ class TestEntitiesConvertToAS2:
result["id"] = "https://localhost/undo" # Real object will have a random UUID postfix here
result["object"]["id"] = "https://localhost/follow" # Real object will have a random UUID postfix here
assert result == {
"@context": CONTEXT,
"@context": context_manager.build_context(activitypubundofollow),
"id": "https://localhost/undo",
"type": "Undo",
"actor": "https://localhost/profile",
@ -119,7 +121,7 @@ class TestEntitiesConvertToAS2:
activitypubpost.pre_send()
result = activitypubpost.to_as2()
assert result == {
'@context': CONTEXT,
'@context': context_manager.build_context(activitypubpost),
'type': 'Create',
'id': 'http://127.0.0.1:8000/post/123456/#create',
'actor': 'http://127.0.0.1:8000/profile/123456/',
@ -130,6 +132,7 @@ class TestEntitiesConvertToAS2:
'cc': ['https://http://127.0.0.1:8000/profile/123456/followers/'],
'to': ['https://www.w3.org/ns/activitystreams#Public'],
'type': 'Note',
'url': 'http://127.0.0.1:8000/post/123456/',
'attributedTo': 'http://127.0.0.1:8000/profile/123456/',
'content': '<h1>raw_content</h1>',
'published': '2019-04-27T00:00:00',
@ -148,13 +151,14 @@ class TestEntitiesConvertToAS2:
activitypubpost_mentions.pre_send()
result = activitypubpost_mentions.to_as2()
assert result == {
'@context': CONTEXT,
'@context': context_manager.build_context(activitypubpost_mentions),
'type': 'Create',
'id': 'http://127.0.0.1:8000/post/123456/#create',
'actor': 'http://127.0.0.1:8000/profile/123456/',
'object': {
'id': 'http://127.0.0.1:8000/post/123456/',
'type': 'Note',
'url': 'http://127.0.0.1:8000/post/123456/',
'attributedTo': 'http://127.0.0.1:8000/profile/123456/',
'content': '<h1>raw_content</h1>\n<p>@{someone@localhost.local} @<a class="mention" '
'href="http://localhost.local/someone" rel="nofollow" target="_blank">'
@ -190,13 +194,14 @@ class TestEntitiesConvertToAS2:
activitypubpost_tags.pre_send()
result = activitypubpost_tags.to_as2()
assert result == {
'@context': CONTEXT,
'@context': context_manager.build_context(activitypubpost_tags),
'type': 'Create',
'id': 'http://127.0.0.1:8000/post/123456/#create',
'actor': 'http://127.0.0.1:8000/profile/123456/',
'object': {
'id': 'http://127.0.0.1:8000/post/123456/',
'type': 'Note',
'url': 'http://127.0.0.1:8000/post/123456/',
'attributedTo': 'http://127.0.0.1:8000/profile/123456/',
'content': '<h1>raw_content</h1>\n'
'<p><a class="mention hashtag" '
@ -233,13 +238,14 @@ class TestEntitiesConvertToAS2:
activitypubpost_images.pre_send()
result = activitypubpost_images.to_as2()
assert result == {
'@context': CONTEXT,
'@context': context_manager.build_context(activitypubpost_images),
'type': 'Create',
'id': 'http://127.0.0.1:8000/post/123456/#create',
'actor': 'http://127.0.0.1:8000/profile/123456/',
'object': {
'id': 'http://127.0.0.1:8000/post/123456/',
'type': 'Note',
'url': 'http://127.0.0.1:8000/post/123456/',
'attributedTo': 'http://127.0.0.1:8000/profile/123456/',
'content': '<p>raw_content</p>',
'published': '2019-04-27T00:00:00',
@ -271,7 +277,7 @@ class TestEntitiesConvertToAS2:
activitypubpost_diaspora_guid.pre_send()
result = activitypubpost_diaspora_guid.to_as2()
assert result == {
'@context': CONTEXT,
'@context': context_manager.build_context(activitypubpost_diaspora_guid),
'type': 'Create',
'id': 'http://127.0.0.1:8000/post/123456/#create',
'actor': 'http://127.0.0.1:8000/profile/123456/',
@ -279,6 +285,7 @@ class TestEntitiesConvertToAS2:
'id': 'http://127.0.0.1:8000/post/123456/',
'diaspora:guid': 'totallyrandomguid',
'type': 'Note',
'url': 'http://127.0.0.1:8000/post/123456/',
'attributedTo': 'http://127.0.0.1:8000/profile/123456/',
'content': '<p>raw_content</p>',
'published': '2019-04-27T00:00:00',
@ -295,7 +302,7 @@ class TestEntitiesConvertToAS2:
def test_profile_to_as2(self, activitypubprofile):
result = activitypubprofile.to_as2()
assert result == {
"@context": CONTEXT,
"@context": context_manager.build_context(activitypubprofile),
"endpoints": {
"sharedInbox": "https://example.com/public",
},
@ -327,7 +334,7 @@ class TestEntitiesConvertToAS2:
def test_profile_to_as2__with_diaspora_guid(self, activitypubprofile_diaspora_guid):
result = activitypubprofile_diaspora_guid.to_as2()
assert result == {
"@context": CONTEXT,
"@context": context_manager.build_context(activitypubprofile_diaspora_guid),
"endpoints": {
"sharedInbox": "https://example.com/public",
},
@ -360,7 +367,7 @@ class TestEntitiesConvertToAS2:
def test_retraction_to_as2(self, activitypubretraction):
result = activitypubretraction.to_as2()
assert result == {
'@context': CONTEXT,
'@context': context_manager.build_context(activitypubretraction),
'type': 'Delete',
'id': 'http://127.0.0.1:8000/post/123456/#delete',
'actor': 'http://127.0.0.1:8000/profile/123456/',
@ -374,7 +381,7 @@ class TestEntitiesConvertToAS2:
def test_retraction_to_as2__announce(self, activitypubretraction_announce):
result = activitypubretraction_announce.to_as2()
assert result == {
'@context': CONTEXT,
'@context': context_manager.build_context(activitypubretraction_announce),
'type': 'Undo',
'id': 'http://127.0.0.1:8000/post/123456/#delete',
'actor': 'http://127.0.0.1:8000/profile/123456/',

Wyświetl plik

@ -187,7 +187,7 @@ class TestActivitypubEntityMappersReceive:
"private": "https://diaspodon.fr/users/jaywink/inbox",
"public": "https://diaspodon.fr/inbox",
}
assert profile.handle == ""
assert profile.handle == None
assert profile.name == "Jason Robinson"
assert profile.image_urls == {
"large": "https://diaspodon.fr/system/accounts/avatars/000/033/155/original/pnc__picked_media_be51984c-4"

Wyświetl plik

@ -13,22 +13,17 @@ from federation.entities.utils import get_base_attributes
class TestGetBaseAttributes:
def test_get_base_attributes_returns_only_intended_attributes(self):
entity = Post()
def test_get_base_attributes_returns_only_intended_attributes(self, diasporapost, diasporaprofile):
entity = diasporapost
attrs = get_base_attributes(entity).keys()
assert set(attrs) == {
"created_at", "location", "provider_display_name", "public", "raw_content",
"signature", "base_url", "actor_id", "id", "handle", "guid", "activity", "activity_id",
"url", "mxid", "times", "to", "cc", "finger",
}
entity = Profile()
'activity', 'actor_id', 'created_at', 'guid', 'handle', 'id',
'provider_display_name', 'public', 'raw_content'}
entity = diasporaprofile
attrs = get_base_attributes(entity).keys()
assert set(attrs) == {
"created_at", "name", "email", "gender", "raw_content", "location", "public",
"nsfw", "public_key", "image_urls", "tag_list", "signature", "url", "atom_url",
"base_url", "id", "actor_id", "handle", "handle", "guid", "activity", "activity_id", "username",
"inboxes", "mxid", "times", "to", "cc", "finger",
}
'created_at', 'guid', 'handle', 'id', 'image_urls', 'inboxes',
'name', 'nsfw', 'public', 'raw_content', 'tag_list'}
class TestGetFullXMLRepresentation:

Wyświetl plik

@ -79,11 +79,13 @@ class TestRetrieveAndParseDocument:
assert entity._children[0].url == "https://files.mastodon.social/media_attachments/files/017/792/237/original" \
"/foobar.jpg"
@patch("federation.entities.activitypub.models.verify_ld_signature", return_value=None)
@patch("federation.entities.activitypub.models.extract_receivers", return_value=[])
@patch("federation.utils.activitypub.fetch_document", autospec=True, return_value=(
json.dumps(ACTIVITYPUB_POST), None, None),
)
def test_returns_entity_for_valid_document__post__wrapped_in_activity(self, mock_fetch, mock_recv):
def test_returns_entity_for_valid_document__post__wrapped_in_activity(
self, mock_fetch, mock_recv, mock_sign):
entity = retrieve_and_parse_document("https://example.com/foobar")
assert isinstance(entity, Note)

Wyświetl plik

@ -1,36 +0,0 @@
import json
from datetime import timedelta
from pyld import jsonld
from federation.utils.django import get_redis
cache = get_redis() or {}
EXPIRATION = int(timedelta(weeks=4).total_seconds())
# This is required to workaround a bug in pyld that has the Accept header
# accept other content types. From what I understand, precedence handling
# is broken
# from https://github.com/digitalbazaar/pyld/issues/133
# caching loosely inspired by https://github.com/digitalbazaar/pyld/issues/70
def get_loader(*args, **kwargs):
    """Build a caching JSON-LD document loader.

    Wraps pyld's requests document loader so that fetched context documents
    are cached in redis (with expiry) when available, else in the
    module-level dict. Also forces ``Accept: application/ld+json`` to work
    around pyld's broken Accept-header precedence handling
    (https://github.com/digitalbazaar/pyld/issues/133).

    :returns: a ``loader(url, options)`` callable suitable for
        ``jsonld.set_document_loader``.
    """
    requests_loader = jsonld.requests_document_loader(*args, **kwargs)

    def loader(url, options=None):
        # Avoid the shared mutable default the original used.
        options = {} if options is None else options
        key = f'ld_cache:{url}'
        try:
            return json.loads(cache[key])
        except KeyError:
            # 'headers' may be absent depending on the caller.
            options.setdefault('headers', {})['Accept'] = 'application/ld+json'
            doc = requests_loader(url, options)
            if isinstance(cache, dict):
                # Fix: store under the same key used for the lookup above
                # (was ``cache[url]``, so the dict cache never hit).
                cache[key] = json.dumps(doc)
            else:
                cache.set(key, json.dumps(doc), ex=EXPIRATION)
            return doc

    return loader
return loader
jsonld.set_document_loader(get_loader())