"""Handles requests for ActivityPub endpoints: actors, inbox, etc.
"""
from base64 import b64encode
import datetime
from hashlib import sha256
import logging
import re
import threading

from cachetools import LRUCache
from flask import request
from google.cloud import ndb
from google.cloud.ndb import OR
from granary import as1, as2
from httpsig import HeaderVerifier
from httpsig.utils import parse_signature_header
from oauth_dropins.webutil import flask_util, util
from oauth_dropins.webutil.flask_util import error
from oauth_dropins.webutil.util import json_dumps, json_loads

from app import app, cache
import common
from common import CACHE_TIME, host_url, redirect_unwrap, redirect_wrap, TLD_BLOCKLIST
from models import Follower, Object, Target, User

logger = logging.getLogger(__name__)

SUPPORTED_TYPES = (
    'Accept',
    'Announce',
    'Article',
    'Audio',
    'Create',
    'Delete',
    'Follow',
    'Image',
    'Like',
    'Note',
    'Undo',
    'Update',
    'Video',
)
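# activity types whose bare object ids we fetch and inline, so the object can
# be rendered in feeds (see inbox() below)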
FETCH_OBJECT_TYPES = (
    'Announce',
)

# activity ids that we've already handled and can now ignore
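# (cachetools' LRUCache isn't thread safe on its own, so reads and writes below
# are guarded by seen_ids_lock)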
seen_ids = LRUCache(100000)
seen_ids_lock = threading.Lock()


@app.get(f'/<regex("{common.DOMAIN_RE}"):domain>')
@flask_util.cached(cache, CACHE_TIME)
def actor(domain):
    """Serves a user's AS2 actor from the datastore."""
    tld = domain.split('.')[-1]
    if tld in TLD_BLOCKLIST:
        error('', status=404)

    user = User.get_by_id(domain)
    if not user:
        return f'User {domain} not found', 404
    elif not user.actor_as2:
        return f'User {domain} not fully set up', 404

    # TODO: unify with common.actor()
    actor = {
        **common.postprocess_as2(user.actor_as2, user=user),
        'id': host_url(domain),
        # This has to be the domain for Mastodon etc interop! It seems like it
        # should be the custom username from the acct: u-url in their h-card,
        # but that breaks Mastodon's Webfinger discovery. Background:
        # https://github.com/snarfed/bridgy-fed/issues/302#issuecomment-1324305460
        # https://github.com/snarfed/bridgy-fed/issues/77
        'preferredUsername': domain,
        'inbox': host_url(f'{domain}/inbox'),
        'outbox': host_url(f'{domain}/outbox'),
        'following': host_url(f'{domain}/following'),
        'followers': host_url(f'{domain}/followers'),
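        # sharedInbox lets remote servers deliver an activity once to the
        # top-level /inbox route below instead of to each user's inbox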
        'endpoints': {
            'sharedInbox': host_url('inbox'),
        },
    }

    logger.info(f'Returning: {json_dumps(actor, indent=2)}')
    return actor, {
        'Content-Type': as2.CONTENT_TYPE,
        'Access-Control-Allow-Origin': '*',
    }


@app.post('/inbox')
@app.post(f'/<regex("{common.DOMAIN_RE}"):domain>/inbox')
def inbox(domain=None):
    """Handles ActivityPub inbox delivery."""
    body = request.get_data(as_text=True)

    # parse and validate AS2 activity
    try:
        activity = request.json
        assert activity
    except (TypeError, ValueError, AssertionError):
        error(f"Couldn't parse body as JSON: {body}", exc_info=True)

    type = activity.get('type')
    actor = activity.get('actor')
    actor_id = actor.get('id') if isinstance(actor, dict) else actor
    logger.info(f'Got {type} activity from {actor_id}: {json_dumps(activity, indent=2)}')

    obj_as2 = activity.get('object') or {}
    if isinstance(obj_as2, str):
        obj_as2 = {'id': obj_as2}

    id = activity.get('id')
    if not id:
        error('Activity has no id')

    # short circuit if we've already seen this activity id
    with seen_ids_lock:
        already_seen = id in seen_ids
        seen_ids[id] = True
        if already_seen or Object.get_by_id(id):
            msg = f'Already handled this activity {id}'
            logger.info(msg)
            return msg, 200
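
    # replace Bridgy Fed redirect-wrapped URLs in the activity with their
    # original targets (redirect_unwrap is the inverse of redirect_wrap in common.py)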
    activity_unwrapped = redirect_unwrap(activity)
    activity_obj = Object(id=id, as2=activity_unwrapped,
                          source_protocol='activitypub')
    activity_obj.put()

    if type == 'Accept':  # eg in response to a Follow
        return ''  # noop
    if type not in SUPPORTED_TYPES:
        error(f'Sorry, {type} activities are not supported yet.', status=501)

    # load user
    user = None
    if domain:
        user = User.get_by_id(domain)
        if not user:
            return f'User {domain} not found', 404

    # optionally verify signature
    # TODO: switch this from erroring to logging lots of detail. need to see
    # which headers, key shapes, etc we get in the wild.
    sig = request.headers.get('Signature')
    if sig:
        logger.info(f'Headers: {json_dumps(dict(request.headers), indent=2)}')
        # parse_signature_header lower-cases all keys
        keyId = parse_signature_header(sig).get('keyid')
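        # the Digest header should be 'SHA-256=' plus the base64-encoded SHA-256
        # of the raw request body; recompute it here to compare below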
        digest = request.headers.get('Digest') or ''
        expected = b64encode(sha256(request.data).digest()).decode()
        if not keyId:
            logger.warning('HTTP Signature missing keyId')
        elif not digest:
            logger.warning('Missing Digest header, required for HTTP Signature')
        elif digest.removeprefix('SHA-256=') != expected:
            logger.warning('Invalid Digest header, required for HTTP Signature')
        else:
            key_actor = common.get_object(keyId, user=user).as2
            key = key_actor.get("publicKey", {}).get('publicKeyPem')
            logger.info(f'Verifying signature for {request.path} with key {key}')
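            # requiring Digest among the signed headers binds the (already
            # checked) body digest to the signature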
            try:
                if HeaderVerifier(request.headers, key, required_headers=['Digest'],
                                  method=request.method, path=request.path,
                                  sign_header='signature').verify():
                    logger.info('HTTP Signature verified!')
                else:
                    logger.warning('HTTP Signature verification failed')
            except BaseException as e:
                logger.warning(f'HTTP Signature verification failed: {e}')
    else:
        logger.info('No HTTP Signature')

    # handle activity!
    if type == 'Undo' and obj_as2.get('type') == 'Follow':
        # skip actor fetch below; we don't need it to undo a follow
        undo_follow(activity_unwrapped)
        activity_obj.status = 'complete'
        activity_obj.put()
        return 'OK'

    elif type == 'Update':
        obj_id = obj_as2.get('id')
        if not obj_id:
            error("Couldn't find id of object to update")

        obj = Object.get_by_id(obj_id) or Object(id=obj_id)
        obj.populate(as2=obj_as2, source_protocol='activitypub')
        obj.put()

        activity_obj.status = 'complete'
        activity_obj.put()
        return 'OK'

    elif type == 'Delete':
        obj_id = obj_as2.get('id')
        if not obj_id:
            error("Couldn't find id of object to delete")

        obj = Object.get_by_id(obj_id)
        if obj:
            logger.info(f'Marking Object {obj_id} deleted')
            obj.deleted = True
            obj.put()

        # assume this is an actor
        # https://github.com/snarfed/bridgy-fed/issues/63
        logger.info(f'Deactivating Followers with src or dest = {obj_id}')
        followers = Follower.query(OR(Follower.src == obj_id,
                                      Follower.dest == obj_id)
                                   ).fetch()
        for f in followers:
            f.status = 'inactive'
        activity_obj.status = 'complete'
        ndb.put_multi(followers + [activity_obj])
        return 'OK'

    # fetch actor if necessary so we have name, profile photo, etc
    if actor and isinstance(actor, str):
        actor = activity['actor'] = activity_unwrapped['actor'] = \
            common.get_object(actor, user=user).as2

    # fetch object if necessary so we can render it in feeds
    inner_obj = activity_unwrapped.get('object')
    if type in FETCH_OBJECT_TYPES and isinstance(inner_obj, str):
        obj = Object.get_by_id(inner_obj) or common.get_object(inner_obj, user=user)
        obj_as2 = activity['object'] = activity_unwrapped['object'] = \
            obj.as2 if obj.as2 else as2.from_as1(obj.as1)

    if type == 'Follow':
        resp = accept_follow(activity, activity_unwrapped, user)

    # send webmentions to each target
    activity_obj.as2 = activity_unwrapped
    common.send_webmentions(as2.to_as1(activity), activity_obj, proxy=True)

    # deliver original posts and reposts to followers
    if ((type == 'Create' and not activity.get('inReplyTo') and not obj_as2.get('inReplyTo'))
            or type == 'Announce'):
        # check that this activity is public. only do this check for Creates,
        # not Like, Follow, or other activity types, since Mastodon doesn't
        # currently mark those as explicitly public.
        if not as2.is_public(activity_unwrapped):
            logger.info('Dropping non-public activity')
            return ''

        if actor:
            actor_id = actor.get('id')
            if actor_id:
                logger.info(f'Finding followers of {actor_id}')
                for f in Follower.query(Follower.dest == actor_id,
                                        projection=[Follower.src]):
                    if f.src not in activity_obj.domains:
                        activity_obj.domains.append(f.src)
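        # activity_obj.domains now lists the users who follow this actor; the
        # 'feed' label below marks the activity for inclusion in their feeds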
        if activity_obj.domains and 'feed' not in activity_obj.labels:
            activity_obj.labels.append('feed')

    if (activity_obj.as1.get('objectType') == 'activity'
            and 'activity' not in activity_obj.labels):
        activity_obj.labels.append('activity')

    activity_obj.put()
    return 'OK'


def accept_follow(follow, follow_unwrapped, user):
    """Replies to an AP Follow request with an Accept activity.

    Args:
      follow: dict, AP Follow activity
      follow_unwrapped: dict, same, except with redirect URLs unwrapped
      user: :class:`User`
    """
    logger.info('Replying to Follow with Accept')

    followee = follow.get('object')
    followee_unwrapped = follow_unwrapped.get('object')
    followee_id = (followee_unwrapped.get('id')
                   if isinstance(followee_unwrapped, dict) else followee_unwrapped)
    follower = follow.get('actor')
    if not followee or not followee_id or not follower:
        error(f'Follow activity requires object and actor. Got: {follow}')

    inbox = follower.get('inbox')
    follower_id = follower.get('id')
    if not inbox or not follower_id:
        error(f'Follow actor requires id and inbox. Got: {follower}')

    # rendered mf2 HTML proxy pages (in render.py) fall back to redirecting to
    # the follow's AS2 id field, but Mastodon's ids are URLs that don't load in
    # browsers, eg https://jawns.club/ac33c547-ca6b-4351-80d5-d11a6879a7b0
    # so, set a synthetic URL based on the follower's profile.
    # https://github.com/snarfed/bridgy-fed/issues/336
    follower_url = util.get_url(follower) or follower_id
    followee_url = util.get_url(followee_unwrapped) or followee_id
    follow_unwrapped.setdefault('url', f'{follower_url}#followed-{followee_url}')

    # store Follower
    follower = Follower.get_or_create(dest=user.key.id(), src=follower_id,
                                      last_follow=follow)
    follower.status = 'active'
    follower.put()

    # send AP Accept
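    # the Accept's actor is the followee (our user) and its object echoes the
    # original Follow; it's delivered to the follower's inbox below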
    accept = {
        '@context': 'https://www.w3.org/ns/activitystreams',
        'id': util.tag_uri(common.PRIMARY_DOMAIN,
                           f'accept/{user.key.id()}/{follow.get("id")}'),
        'type': 'Accept',
        'actor': followee,
        'object': {
            'type': 'Follow',
            'actor': follower_id,
            'object': followee,
        }
    }
    return common.signed_post(inbox, data=accept, user=user)


@ndb.transactional()
def undo_follow(undo_unwrapped):
    """Handles an AP Undo Follow request by deactivating the Follower entity.

    Args:
      undo_unwrapped: dict, AP Undo activity with redirect URLs unwrapped
    """
    logger.info('Undoing Follow')

    follow = undo_unwrapped.get('object', {})
    follower = follow.get('actor')
    followee = follow.get('object')
    if isinstance(followee, dict):
        followee = followee.get('id') or util.get_url(followee)
    if not follower or not followee:
        error(f'Undo of Follow requires object with actor and object. Got: {follow}')

    # deactivate Follower
    user_domain = util.domain_from_link(followee, minimize=False)
    follower_obj = Follower.get_by_id(Follower._id(dest=user_domain, src=follower))
    if follower_obj:
        follower_obj.status = 'inactive'
        follower_obj.put()
    else:
        logger.warning(f'No Follower found for {user_domain} {follower}')

    # TODO send webmention with 410 of u-follow


@app.get(f'/<regex("{common.DOMAIN_RE}"):domain>/<any(followers,following):collection>')
@flask_util.cached(cache, CACHE_TIME)
def follower_collection(domain, collection):
    """ActivityPub Followers and Following collections.

    https://www.w3.org/TR/activitypub/#followers
    https://www.w3.org/TR/activitypub/#collections
    https://www.w3.org/TR/activitystreams-core/#paging
    """
    if not User.get_by_id(domain):
        return f'User {domain} not found', 404

    # page
    followers, new_before, new_after = common.fetch_followers(domain, collection)
    items = []
    for f in followers:
        f_as2 = f.to_as2()
        if f_as2:
            items.append(f_as2)

    page = {
        'type': 'CollectionPage',
        'partOf': request.base_url,
        'items': items,
    }
    if new_before:
        page['next'] = f'{request.base_url}?before={new_before}'
    if new_after:
        page['prev'] = f'{request.base_url}?after={new_after}'
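
    # a ?before= or ?after= query param means this request is for a single
    # page, so return just the CollectionPage; otherwise return the full
    # Collection with this page embedded as its first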
    if 'before' in request.args or 'after' in request.args:
        page.update({
            '@context': 'https://www.w3.org/ns/activitystreams',
            'id': request.url,
        })
        logger.info(f'Returning {json_dumps(page, indent=2)}')
        return page, {'Content-Type': as2.CONTENT_TYPE}

    # collection
    domain_prop = Follower.dest if collection == 'followers' else Follower.src
    count = Follower.query(
        Follower.status == 'active',
        domain_prop == domain,
    ).count()

    collection = {
        '@context': 'https://www.w3.org/ns/activitystreams',
        'id': request.base_url,
        'type': 'Collection',
        'summary': f"{domain}'s {collection}",
        'totalItems': count,
        'first': page,
    }
    logger.info(f'Returning {json_dumps(collection, indent=2)}')
    return collection, {'Content-Type': as2.CONTENT_TYPE}


@app.get(f'/<regex("{common.DOMAIN_RE}"):domain>/outbox')
def outbox(domain):
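    """Serves a user's ActivityPub outbox; currently a stub that's always empty."""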
    url = common.host_url(f"{domain}/outbox")
    return {
        '@context': 'https://www.w3.org/ns/activitystreams',
        'id': url,
        'summary': f"{domain}'s outbox",
        'type': 'OrderedCollection',
        'totalItems': 0,
        'first': {
            'type': 'CollectionPage',
            'partOf': url,
            'items': [],
        },
    }, {'Content-Type': as2.CONTENT_TYPE}