2023-03-08 21:10:41 +00:00
|
|
|
"""Base protocol class and common code."""
|
2023-10-26 19:04:04 +00:00
|
|
|
import copy
|
2023-03-08 21:10:41 +00:00
|
|
|
import logging
|
2023-11-13 22:18:32 +00:00
|
|
|
from threading import Lock
|
2023-06-14 21:57:59 +00:00
|
|
|
from urllib.parse import urljoin
|
2023-03-08 21:10:41 +00:00
|
|
|
|
2023-11-13 22:18:32 +00:00
|
|
|
from cachetools import cached, LRUCache
|
2023-10-31 19:49:15 +00:00
|
|
|
from flask import g, request
|
2023-03-08 21:10:41 +00:00
|
|
|
from google.cloud import ndb
|
|
|
|
from google.cloud.ndb import OR
|
2023-06-20 18:22:54 +00:00
|
|
|
from granary import as1
|
2023-10-16 20:04:34 +00:00
|
|
|
from oauth_dropins.webutil.flask_util import cloud_tasks_only
|
2023-10-26 23:00:03 +00:00
|
|
|
from oauth_dropins.webutil import util
|
|
|
|
from oauth_dropins.webutil.util import json_dumps, json_loads
|
2023-06-13 20:17:11 +00:00
|
|
|
import werkzeug.exceptions
|
2023-03-08 21:10:41 +00:00
|
|
|
|
|
|
|
import common
|
2023-10-26 19:04:04 +00:00
|
|
|
from common import add, DOMAIN_BLOCKLIST, DOMAINS, error, subdomain_wrap
|
2023-09-19 02:19:59 +00:00
|
|
|
from flask_app import app
|
2023-11-02 20:08:12 +00:00
|
|
|
from ids import translate_object_id, translate_user_id
|
2023-11-02 19:18:08 +00:00
|
|
|
from models import Follower, get_originals, Object, PROTOCOLS, Target, User
|
2023-03-08 21:10:41 +00:00
|
|
|
|
|
|
|
# AS1 object types / verbs that Protocol.receive knows how to handle.
# frozenset instead of tuple: this constant is only ever used for membership
# tests (eg ``obj.type not in SUPPORTED_TYPES``), so O(1) lookup is free.
SUPPORTED_TYPES = frozenset((
    'accept',
    'article',
    'audio',
    'comment',
    'delete',
    'follow',
    'image',
    'like',
    'note',
    'post',
    'share',
    'stop-following',
    'undo',
    'update',
    'video',
))
|
|
|
|
|
|
|
|
# activity ids that we've already handled and can now ignore.
# used in Protocol.receive to short-circuit duplicate deliveries of the
# same activity. the lock guards the check-then-insert done there.
seen_ids = LRUCache(100000)
seen_ids_lock = Lock()

# objects that have been loaded in Protocol.load, keyed by id.
# NOTE(review): the lock appears intended for Protocol.load's cache access —
# confirm all readers/writers of objects_cache take it.
objects_cache = LRUCache(5000)
objects_cache_lock = Lock()

logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
2023-05-26 23:07:36 +00:00
|
|
|
class Protocol:
|
2023-03-08 21:10:41 +00:00
|
|
|
"""Base protocol class. Not to be instantiated; classmethods only.
|
|
|
|
|
|
|
|
Attributes:
|
2023-10-06 06:32:31 +00:00
|
|
|
LABEL (str): human-readable lower case name
|
2023-10-06 15:22:50 +00:00
|
|
|
OTHER_LABELS (list of str): label aliases
|
2023-10-06 06:32:31 +00:00
|
|
|
ABBREV (str): lower case abbreviation, used in URL paths
|
2023-10-10 18:14:42 +00:00
|
|
|
LOGO_HTML (str): logo emoji or ``<img>`` tag
|
2023-10-24 23:09:28 +00:00
|
|
|
CONTENT_TYPE (str): MIME type of this protocol's native data format,
|
|
|
|
appropriate for the ``Content-Type`` HTTP header.
|
2023-11-27 22:44:05 +00:00
|
|
|
HAS_FOLLOW_ACCEPTS (bool): whether this protocol supports explicit
|
|
|
|
accept/reject activities in response to follows, eg ActivityPub
|
2023-03-08 21:10:41 +00:00
|
|
|
"""
|
2023-06-11 15:14:17 +00:00
|
|
|
ABBREV = None
|
|
|
|
OTHER_LABELS = ()
|
2023-10-10 18:14:42 +00:00
|
|
|
LOGO_HTML = ''
|
2023-10-24 23:09:28 +00:00
|
|
|
CONTENT_TYPE = None
|
2023-11-27 22:44:05 +00:00
|
|
|
HAS_FOLLOW_ACCEPTS = False
|
2023-03-08 21:10:41 +00:00
|
|
|
|
|
|
|
def __init__(self):
|
|
|
|
assert False
|
|
|
|
|
2023-06-11 15:14:17 +00:00
|
|
|
    @classmethod
    @property
    def LABEL(cls):
        """str: human-readable lower case name, derived from the class name.

        NOTE(review): chaining ``@classmethod`` with ``@property`` was
        deprecated in Python 3.11 and removed in 3.13 — confirm the target
        Python version before upgrading.
        """
        return cls.__name__.lower()
|
|
|
|
|
2023-06-11 02:50:31 +00:00
|
|
|
@staticmethod
|
2023-06-13 03:51:32 +00:00
|
|
|
def for_request(fed=None):
|
2023-06-11 02:50:31 +00:00
|
|
|
"""Returns the protocol for the current request.
|
|
|
|
|
|
|
|
...based on the request's hostname.
|
|
|
|
|
2023-06-13 03:51:32 +00:00
|
|
|
Args:
|
2023-10-06 06:32:31 +00:00
|
|
|
fed (str or protocol.Protocol): protocol to return if the current
|
|
|
|
request is on ``fed.brid.gy``
|
2023-06-13 03:51:32 +00:00
|
|
|
|
2023-06-11 02:50:31 +00:00
|
|
|
Returns:
|
2023-10-06 15:22:50 +00:00
|
|
|
Protocol: protocol, or None if the provided domain or request hostname
|
|
|
|
domain is not a subdomain of ``brid.gy`` or isn't a known protocol
|
2023-06-11 02:50:31 +00:00
|
|
|
"""
|
2023-09-22 18:55:19 +00:00
|
|
|
return Protocol.for_bridgy_subdomain(request.host, fed=fed)
|
2023-06-11 02:50:31 +00:00
|
|
|
|
|
|
|
@staticmethod
|
2023-09-22 18:55:19 +00:00
|
|
|
def for_bridgy_subdomain(domain_or_url, fed=None):
|
2023-06-11 02:50:31 +00:00
|
|
|
"""Returns the protocol for a brid.gy subdomain.
|
|
|
|
|
2023-06-13 05:01:12 +00:00
|
|
|
Args:
|
2023-10-06 06:32:31 +00:00
|
|
|
domain_or_url (str)
|
|
|
|
fed (str or protocol.Protocol): protocol to return if the current
|
|
|
|
request is on ``fed.brid.gy``
|
2023-06-13 05:01:12 +00:00
|
|
|
|
2023-10-06 15:22:50 +00:00
|
|
|
Returns:
|
|
|
|
class: :class:`Protocol` subclass, or None if the provided domain or request
|
|
|
|
hostname domain is not a subdomain of ``brid.gy`` or isn't a known
|
|
|
|
protocol
|
2023-06-11 02:50:31 +00:00
|
|
|
"""
|
2023-06-13 05:01:12 +00:00
|
|
|
domain = (util.domain_from_link(domain_or_url, minimize=False)
|
|
|
|
if util.is_web(domain_or_url)
|
|
|
|
else domain_or_url)
|
2023-06-11 02:50:31 +00:00
|
|
|
|
2023-06-13 05:01:12 +00:00
|
|
|
if domain == common.PRIMARY_DOMAIN or domain in common.LOCAL_DOMAINS:
|
2023-09-27 20:55:16 +00:00
|
|
|
return PROTOCOLS[fed] if isinstance(fed, str) else fed
|
2023-06-13 05:01:12 +00:00
|
|
|
elif domain and domain.endswith(common.SUPERDOMAIN):
|
|
|
|
label = domain.removesuffix(common.SUPERDOMAIN)
|
|
|
|
return PROTOCOLS.get(label)
|
2023-06-11 02:50:31 +00:00
|
|
|
|
2023-06-13 20:17:11 +00:00
|
|
|
@classmethod
|
|
|
|
def owns_id(cls, id):
|
|
|
|
"""Returns whether this protocol owns the id, or None if it's unclear.
|
|
|
|
|
|
|
|
To be implemented by subclasses.
|
|
|
|
|
2023-09-22 20:11:15 +00:00
|
|
|
IDs are string identities that uniquely identify users, and are intended
|
|
|
|
primarily to be machine readable and usable. Compare to handles, which
|
|
|
|
are human-chosen, human-meaningful, and often but not always unique.
|
|
|
|
|
2023-06-13 20:17:11 +00:00
|
|
|
Some protocols' ids are more or less deterministic based on the id
|
2023-10-06 15:22:50 +00:00
|
|
|
format, eg AT Protocol owns ``at://`` URIs. Others, like http(s) URLs,
|
|
|
|
could be owned by eg Web or ActivityPub.
|
2023-06-13 20:17:11 +00:00
|
|
|
|
|
|
|
This should be a quick guess without expensive side effects, eg no
|
|
|
|
external HTTP fetches to fetch the id itself or otherwise perform
|
|
|
|
discovery.
|
|
|
|
|
2023-10-06 15:22:50 +00:00
|
|
|
Returns False if the id's domain is in :const:`common.DOMAIN_BLOCKLIST`.
|
2023-07-02 21:55:05 +00:00
|
|
|
|
2023-06-13 20:17:11 +00:00
|
|
|
Args:
|
2023-09-22 19:14:50 +00:00
|
|
|
id (str)
|
2023-06-13 20:17:11 +00:00
|
|
|
|
|
|
|
Returns:
|
2023-10-06 15:22:50 +00:00
|
|
|
bool or None:
|
2023-09-22 19:14:50 +00:00
|
|
|
"""
|
|
|
|
return False
|
|
|
|
|
|
|
|
@classmethod
|
|
|
|
def owns_handle(cls, handle):
|
|
|
|
"""Returns whether this protocol owns the handle, or None if it's unclear.
|
|
|
|
|
|
|
|
To be implemented by subclasses.
|
|
|
|
|
2023-09-22 20:11:15 +00:00
|
|
|
Handles are string identities that are human-chosen, human-meaningful,
|
|
|
|
and often but not always unique. Compare to IDs, which uniquely identify
|
|
|
|
users, and are intended primarily to be machine readable and usable.
|
|
|
|
|
2023-09-22 19:14:50 +00:00
|
|
|
Some protocols' handles are more or less deterministic based on the id
|
|
|
|
format, eg ActivityPub (technically WebFinger) handles are
|
|
|
|
``@user@instance.com``. Others, like domains, could be owned by eg Web,
|
|
|
|
ActivityPub, AT Protocol, or others.
|
|
|
|
|
|
|
|
This should be a quick guess without expensive side effects, eg no
|
|
|
|
external HTTP fetches to fetch the id itself or otherwise perform
|
|
|
|
discovery.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
handle (str)
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
bool or None
|
2023-06-13 20:17:11 +00:00
|
|
|
"""
|
|
|
|
return False
|
|
|
|
|
2023-09-22 20:11:15 +00:00
|
|
|
@classmethod
|
|
|
|
def handle_to_id(cls, handle):
|
|
|
|
"""Converts a handle to an id.
|
|
|
|
|
|
|
|
To be implemented by subclasses.
|
|
|
|
|
|
|
|
May incur network requests, eg DNS queries or HTTP requests.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
handle (str)
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
str: corresponding id, or None if the handle can't be found
|
|
|
|
"""
|
|
|
|
raise NotImplementedError()
|
|
|
|
|
2023-06-13 21:30:00 +00:00
|
|
|
    @classmethod
    def key_for(cls, id):
        """Returns the :class:`google.cloud.ndb.Key` for a given id's :class:`models.User`.

        To be implemented by subclasses. Canonicalizes the id if necessary.

        If called via ``Protocol.key_for``, infers the appropriate protocol
        with :meth:`for_id`. If called with a concrete subclass, uses that
        subclass as is.

        Args:
          id (str)

        Returns:
          google.cloud.ndb.Key: matching key, or None if the given id is not a
          valid :class:`User` id for this protocol, or if the user has opted
          out.
        """
        if cls == Protocol:
            # infer the owning protocol, then delegate to that subclass
            proto = Protocol.for_id(id)
            return proto.key_for(id) if proto else None

        # load user so that we follow use_instead
        existing = cls.get_by_id(id, allow_opt_out=True)
        if existing:
            if existing.status == 'opt-out':
                # opted-out users are treated as nonexistent
                return None
            return existing.key

        # no stored user; build a key from the (possibly canonicalized) id
        return cls(id=id).key
|
2023-06-13 21:30:00 +00:00
|
|
|
|
2023-11-13 22:18:32 +00:00
|
|
|
    # NOTE(review): @cached is applied *outside* @staticmethod, so the cache
    # wraps the staticmethod object — confirm this works on the deployed
    # Python version (staticmethods became directly callable in 3.10).
    @cached(LRUCache(20000), lock=Lock())
    @staticmethod
    def for_id(id):
        """Returns the protocol for a given id.

        May incur expensive side effects like fetching the id itself over the
        network or other discovery.

        Args:
          id (str)

        Returns:
          Protocol subclass: matching protocol, or None if no known protocol
          owns this id
        """
        logger.info(f'Determining protocol for id {id}')
        if not id:
            return None

        # step 1: check for our per-protocol subdomains
        if util.is_web(id):
            by_subdomain = Protocol.for_bridgy_subdomain(id)
            if by_subdomain:
                logger.info(f' {by_subdomain.LABEL} owns id {id}')
                return by_subdomain

        # step 2: check if any Protocols say conclusively that they own it
        # sort to be deterministic
        protocols = sorted(set(p for p in PROTOCOLS.values() if p),
                           key=lambda p: p.LABEL)
        candidates = []
        for protocol in protocols:
            owns = protocol.owns_id(id)
            if owns:
                # conclusive yes
                return protocol
            elif owns is not False:
                # None means "maybe"; keep as a candidate for later steps
                candidates.append(protocol)

        if len(candidates) == 1:
            logger.info(f' {candidates[0].LABEL} owns id {id}')
            return candidates[0]

        # step 3: look for existing Objects in the datastore
        obj = Protocol.load(id, remote=False)
        if obj and obj.source_protocol:
            logger.info(f' {obj.key} owned by source_protocol {obj.source_protocol}')
            return PROTOCOLS[obj.source_protocol]

        # step 4: fetch over the network, trying each remaining candidate
        for protocol in candidates:
            logger.info(f'Trying {protocol.LABEL}')
            try:
                if protocol.load(id, local=False, remote=True):
                    logger.info(f' {protocol.LABEL} owns id {id}')
                    return protocol
            except werkzeug.exceptions.BadGateway:
                # we tried and failed fetching the id over the network.
                # this depends on ActivityPub.fetch raising this!
                return None
            except werkzeug.exceptions.HTTPException as e:
                # internal error we generated ourselves; try next protocol
                pass
            except Exception as e:
                code, _ = util.interpret_http_exception(e)
                if code:
                    # we tried and failed fetching the id over the network
                    return None
                # not an HTTP-ish failure; surface it to the caller
                raise

        logger.info(f'No matching protocol found for {id} !')
        return None
|
|
|
|
|
2023-09-27 21:58:33 +00:00
|
|
|
    @staticmethod
    def for_handle(handle):
        """Returns the protocol for a given handle.

        May incur expensive side effects like resolving the handle itself over
        the network or other discovery.

        Args:
          handle (str)

        Returns:
          (Protocol subclass, str) tuple: matching protocol and optional id
          (if resolved), or ``(None, None)`` if no known protocol owns this
          handle
        """
        # TODO: normalize, eg convert domains to lower case
        logger.info(f'Determining protocol for handle {handle}')
        if not handle:
            return (None, None)

        # step 1: check if any Protocols say conclusively that they own it.
        # sort to be deterministic.
        protocols = sorted(set(p for p in PROTOCOLS.values() if p),
                           key=lambda p: p.LABEL)
        candidates = []
        for proto in protocols:
            owns = proto.owns_handle(handle)
            if owns:
                # conclusive yes
                logger.info(f' {proto.LABEL} owns handle {handle}')
                return (proto, None)
            elif owns is not False:
                # None means "maybe"; keep for the later steps
                candidates.append(proto)

        if len(candidates) == 1:
            logger.info(f' {candidates[0].LABEL} owns handle {handle}')
            return (candidates[0], None)

        # step 2: look for matching User in the datastore
        for proto in candidates:
            user = proto.query(proto.handle == handle).get()
            if user:
                if user.status == 'opt-out':
                    # opted-out users are treated as nonexistent
                    return (None, None)
                logger.info(f' user {user.key} owns handle {handle}')
                return (proto, user.key.id())

        # step 3: resolve handle to id (may hit the network)
        for proto in candidates:
            id = proto.handle_to_id(handle)
            if id:
                logger.info(f' {proto.LABEL} resolved handle {handle} to id {id}')
                return (proto, id)

        return (None, None)
|
|
|
|
|
2023-07-03 15:19:30 +00:00
|
|
|
@classmethod
|
2023-11-20 04:52:03 +00:00
|
|
|
def actor_key(cls, obj):
|
2023-07-03 15:19:30 +00:00
|
|
|
"""Returns the :class:`User`: key for a given object's author or actor.
|
|
|
|
|
|
|
|
Args:
|
2023-10-06 06:32:31 +00:00
|
|
|
obj (models.Object)
|
2023-07-03 15:19:30 +00:00
|
|
|
|
|
|
|
Returns:
|
2023-10-06 15:22:50 +00:00
|
|
|
google.cloud.ndb.key.Key or None:
|
2023-07-03 15:19:30 +00:00
|
|
|
"""
|
|
|
|
owner = as1.get_owner(obj.as1)
|
|
|
|
if owner:
|
|
|
|
return cls.key_for(owner)
|
|
|
|
|
2023-03-08 21:10:41 +00:00
|
|
|
@classmethod
|
2023-11-26 04:07:14 +00:00
|
|
|
def send(to_cls, obj, url, from_user=None, orig_obj=None):
|
2023-03-08 21:10:41 +00:00
|
|
|
"""Sends an outgoing activity.
|
|
|
|
|
|
|
|
To be implemented by subclasses.
|
|
|
|
|
|
|
|
Args:
|
2023-10-06 06:32:31 +00:00
|
|
|
obj (models.Object): with activity to send
|
|
|
|
url (str): destination URL to send to
|
2023-11-26 04:07:14 +00:00
|
|
|
from_user (models.User): user (actor) this activity is from
|
2023-10-07 19:48:20 +00:00
|
|
|
orig_obj (models.Object): the "original object" that this object
|
|
|
|
refers to, eg replies to or reposts or likes
|
2023-03-20 18:23:49 +00:00
|
|
|
|
|
|
|
Returns:
|
2023-10-07 19:48:20 +00:00
|
|
|
bool: True if the activity is sent successfully, False if it is
|
|
|
|
ignored or otherwise unsent due to protocol logic, eg no webmention
|
|
|
|
endpoint, protocol doesn't support the activity type. (Failures are
|
|
|
|
raised as exceptions.)
|
2023-03-08 21:10:41 +00:00
|
|
|
|
|
|
|
Raises:
|
2023-10-07 19:48:20 +00:00
|
|
|
werkzeug.HTTPException if the request fails
|
2023-03-08 21:10:41 +00:00
|
|
|
"""
|
|
|
|
raise NotImplementedError()
|
|
|
|
|
|
|
|
@classmethod
|
2023-06-12 22:50:47 +00:00
|
|
|
def fetch(cls, obj, **kwargs):
|
2023-06-18 14:29:54 +00:00
|
|
|
"""Fetches a protocol-specific object and populates it in an :class:`Object`.
|
2023-03-08 21:10:41 +00:00
|
|
|
|
2023-07-14 19:45:47 +00:00
|
|
|
Errors are raised as exceptions. If this method returns False, the fetch
|
|
|
|
didn't fail but didn't succeed either, eg the id isn't valid for this
|
|
|
|
protocol, or the fetch didn't return valid data for this protocol.
|
|
|
|
|
2023-06-18 14:29:54 +00:00
|
|
|
To be implemented by subclasses.
|
2023-03-08 21:10:41 +00:00
|
|
|
|
|
|
|
Args:
|
2023-10-06 06:32:31 +00:00
|
|
|
obj (models.Object): with the id to fetch. Data is filled into one of
|
2023-10-06 15:22:50 +00:00
|
|
|
the protocol-specific properties, eg ``as2``, ``mf2``, ``bsky``.
|
|
|
|
kwargs: subclass-specific
|
2023-07-14 19:45:47 +00:00
|
|
|
|
|
|
|
Returns:
|
2023-10-06 15:22:50 +00:00
|
|
|
bool: True if the object was fetched and populated successfully,
|
2023-07-14 19:45:47 +00:00
|
|
|
False otherwise
|
2023-10-06 15:22:50 +00:00
|
|
|
|
|
|
|
Raises:
|
|
|
|
werkzeug.HTTPException: if the fetch fails
|
2023-03-08 21:10:41 +00:00
|
|
|
"""
|
|
|
|
raise NotImplementedError()
|
|
|
|
|
2023-05-24 04:30:57 +00:00
|
|
|
@classmethod
|
2023-11-26 04:07:14 +00:00
|
|
|
def convert(cls, obj, from_user=None):
|
2023-10-24 23:09:28 +00:00
|
|
|
"""Converts an :class:`Object` to this protocol's data format.
|
2023-05-24 04:30:57 +00:00
|
|
|
|
2023-10-24 23:09:28 +00:00
|
|
|
For example, an HTML string for :class:`Web`, or a dict with AS2 JSON
|
|
|
|
and ``application/activity+json`` for :class:`ActivityPub`.
|
2023-05-24 04:30:57 +00:00
|
|
|
|
2023-11-03 22:52:37 +00:00
|
|
|
To be implemented by subclasses. Implementations should generally call
|
|
|
|
:meth:`Protocol.translate_ids` (as their own class) before converting to
|
|
|
|
their format.
|
2023-05-24 04:30:57 +00:00
|
|
|
|
|
|
|
Args:
|
2023-10-06 06:32:31 +00:00
|
|
|
obj (models.Object):
|
2023-11-26 04:07:14 +00:00
|
|
|
from_user (models.User): user (actor) this activity/object is from
|
2023-05-24 04:30:57 +00:00
|
|
|
|
|
|
|
Returns:
|
2023-11-03 22:52:37 +00:00
|
|
|
converted object in the protocol's native format, often a dict
|
2023-05-24 04:30:57 +00:00
|
|
|
"""
|
|
|
|
raise NotImplementedError()
|
|
|
|
|
2023-06-16 20:16:17 +00:00
|
|
|
@classmethod
|
|
|
|
def target_for(cls, obj, shared=False):
|
2023-06-21 00:06:32 +00:00
|
|
|
"""Returns an :class:`Object`'s delivery target (endpoint).
|
2023-06-16 20:16:17 +00:00
|
|
|
|
|
|
|
To be implemented by subclasses.
|
|
|
|
|
|
|
|
Examples:
|
|
|
|
|
2023-10-06 15:22:50 +00:00
|
|
|
* If obj has ``source_protocol`` ``web``, returns its URL, as a
|
2023-06-16 20:16:17 +00:00
|
|
|
webmention target.
|
2023-10-06 15:22:50 +00:00
|
|
|
* If obj is an ``activitypub`` actor, returns its inbox.
|
|
|
|
* If obj is an ``activitypub`` object, returns it's author's or actor's
|
2023-06-21 00:06:32 +00:00
|
|
|
inbox.
|
2023-06-16 20:16:17 +00:00
|
|
|
|
|
|
|
Args:
|
2023-10-06 06:32:31 +00:00
|
|
|
obj (models.Object):
|
2023-10-06 15:22:50 +00:00
|
|
|
shared (bool): optional. If True, returns a common/shared
|
|
|
|
endpoint, eg ActivityPub's ``sharedInbox``, that can be reused for
|
2023-06-16 20:16:17 +00:00
|
|
|
multiple recipients for efficiency
|
|
|
|
|
|
|
|
Returns:
|
2023-10-06 15:22:50 +00:00
|
|
|
str: target endpoint, or None if not available.
|
2023-06-16 20:16:17 +00:00
|
|
|
"""
|
|
|
|
raise NotImplementedError()
|
|
|
|
|
2023-09-06 23:15:19 +00:00
|
|
|
@classmethod
|
|
|
|
def is_blocklisted(cls, url):
|
|
|
|
"""Returns True if we block the given URL and shouldn't deliver to it.
|
|
|
|
|
|
|
|
Default implementation here, subclasses may override.
|
|
|
|
|
|
|
|
Args:
|
2023-10-06 06:32:31 +00:00
|
|
|
url (str):
|
2023-09-06 23:15:19 +00:00
|
|
|
|
2023-10-06 15:22:50 +00:00
|
|
|
Returns: bool:
|
2023-09-06 23:15:19 +00:00
|
|
|
"""
|
|
|
|
return util.domain_or_parent_in(util.domain_from_link(url),
|
|
|
|
DOMAIN_BLOCKLIST + DOMAINS)
|
|
|
|
|
2023-03-08 21:10:41 +00:00
|
|
|
    @classmethod
    def translate_ids(to_cls, obj):
        """Wraps ids and actors in an AS1 object in subdomain convert URLs.

        Infers source protocol for each id value separately.

        For example, if ``proto`` is :class:`ActivityPub`, the ATProto URI
        ``at://did:plc:abc/coll/123`` will be converted to
        ``https://atproto.brid.gy/ap/at://did:plc:abc/coll/123``.

        Wraps these AS1 fields:

        * ``id``
        * ``actor``
        * ``author``
        * ``object``
        * ``object.actor``
        * ``object.author``
        * ``object.id``
        * ``object.inReplyTo``
        * ``tags.[objectType=mention].url``

        This is the inverse of :meth:`models.Object.resolve_ids`. Much of the
        same logic is duplicated there!

        Args:
          to_proto (Protocol subclass)
          obj (dict): AS1 object or activity (not :class:`models.Object`!)

        Returns:
          dict: wrapped version of ``obj``
        """
        assert to_cls != Protocol
        if not obj:
            return obj

        # work on a deep copy; the input dict is never mutated
        outer_obj = copy.deepcopy(obj)
        inner_obj = outer_obj['object'] = as1.get_object(outer_obj)

        def translate(elem, field, fn):
            # normalize elem[field] to a dict, translate its id in place,
            # then collapse back to a bare string id if nothing else remains
            elem[field] = as1.get_object(elem, field)
            id = elem[field].get('id')
            if id and util.domain_from_link(id) not in DOMAINS:
                from_cls = Protocol.for_id(id)
                # TODO: what if from_cls is None? relax translate_object_id,
                # make it a noop if we don't know enough about from/to?
                if from_cls and from_cls != to_cls:
                    elem[field]['id'] = fn(id=id, from_proto=from_cls, to_proto=to_cls)
            if elem[field].keys() == {'id'}:
                elem[field] = elem[field]['id']

        # actors get user-id translation, everything else object-id translation
        type = as1.object_type(outer_obj)
        translate(outer_obj, 'id',
                  translate_user_id if type in as1.ACTOR_TYPES
                  else translate_object_id)

        # follow/stop-following targets are users even though the inner
        # object's own type may not say so
        inner_is_actor = (as1.object_type(inner_obj) in as1.ACTOR_TYPES
                          or type in ('follow', 'stop-following'))
        translate(inner_obj, 'id',
                  translate_user_id if inner_is_actor else translate_object_id)

        # translate the secondary fields on both the activity and its object
        for o in outer_obj, inner_obj:
            translate(o, 'inReplyTo', translate_object_id)
            for field in 'actor', 'author':
                translate(o, field, translate_user_id)
            for tag in as1.get_objects(o, 'tags'):
                if tag.get('objectType') == 'mention':
                    translate(tag, 'url', translate_user_id)

        outer_obj = util.trim_nulls(outer_obj)
        # collapse a now-bare inner object down to its id string
        if outer_obj.get('object', {}).keys() == {'id'}:
            outer_obj['object'] = inner_obj['id']

        return outer_obj
|
|
|
|
|
|
|
|
    @classmethod
    def receive(from_cls, obj, authed_as=None):
        """Handles an incoming activity.

        If ``obj``'s key is unset, ``obj.as1``'s id field is used. If both are
        unset, raises :class:`werkzeug.exceptions.BadRequest`.

        Args:
          obj (models.Object)
          authed_as (str): authenticated actor id who sent this activity

        Returns:
          (str, int) tuple: (response body, HTTP status code) Flask response

        Raises:
          werkzeug.HTTPException: if the request is invalid
        """
        # check some invariants
        assert from_cls != Protocol
        assert isinstance(obj, Object), obj
        logger.info(f'From {from_cls.LABEL}: {obj.key} AS1: {json_dumps(obj.as1, indent=2)}')

        if not obj.as1:
            error('No object data provided')

        # determine the activity id: prefer the datastore key, fall back to
        # the AS1 id (and set the key from it)
        id = None
        if obj.key and obj.key.id():
            id = obj.key.id()

        if not id:
            id = obj.as1.get('id')
            obj.key = ndb.Key(Object, id)

        if not id:
            error('No id provided')

        # short circuit if we've already seen this activity id.
        # (don't do this for bare objects since we need to check further down
        # whether they've been updated since we saw them last.)
        if obj.as1.get('objectType') == 'activity' and 'force' not in request.values:
            # check-then-insert under the lock so concurrent deliveries of the
            # same id don't both proceed
            with seen_ids_lock:
                already_seen = id in seen_ids
                seen_ids[id] = True

            if (already_seen
                    or (obj.new is False and obj.changed is False)
                    or (obj.new is None and obj.changed is None
                        and from_cls.load(id, remote=False))):
                msg = f'Already handled this activity {id}'
                logger.info(msg)
                return msg, 204

        # load actor user, check authorization
        actor = as1.get_owner(obj.as1)
        if not actor:
            error('Activity missing actor or author', status=400)

        if authed_as:
            assert isinstance(authed_as, str)
            if actor != authed_as:
                # NOTE(review): mismatch is only logged, not rejected —
                # confirm that's intentional
                logger.warning(f"actor {actor} isn't authed user {authed_as}")

        from_user = from_cls.get_or_create(id=actor)
        if not from_user:
            error(f'Actor {actor} is opted out', status=204)

        # update copy ids to originals
        obj.resolve_ids()

        # write Object to datastore
        orig = obj
        obj = Object.get_or_create(id, **orig.to_dict(), actor=actor)
        # preserve new/changed flags computed on the incoming object
        if orig.new is not None:
            obj.new = orig.new
        if orig.changed is not None:
            obj.changed = orig.changed

        # if this is a post, ie not an activity, wrap it in a create or update
        obj = from_cls.handle_bare_object(obj)
        obj.add('users', from_user.key)

        if obj.type not in SUPPORTED_TYPES:
            error(f'Sorry, {obj.type} activities are not supported yet.', status=501)

        inner_obj_as1 = as1.get_object(obj.as1)
        if obj.as1.get('verb') in ('post', 'update', 'delete'):
            # also associate the inner object's owner with this activity
            if inner_owner := as1.get_owner(inner_obj_as1):
                if inner_owner_key := from_cls.key_for(inner_owner):
                    obj.add('users', inner_owner_key)

        obj.source_protocol = from_cls.LABEL
        obj.put()

        # store inner object (only when it carries more than a bare id)
        inner_obj_id = inner_obj_as1.get('id')
        if obj.type in ('post', 'update') and inner_obj_as1.keys() > set(['id']):
            Object.get_or_create(inner_obj_id, our_as1=inner_obj_as1,
                                 source_protocol=from_cls.LABEL, actor=actor)

        actor = as1.get_object(obj.as1, 'actor')
        actor_id = actor.get('id')

        # handle activity!

        # accept, eg in response to a follow. only send if the destination
        # supports accepts.
        if obj.type == 'accept':
            to_cls = Protocol.for_id(inner_obj_id)
            if not to_cls or not to_cls.HAS_FOLLOW_ACCEPTS:
                return 'OK'  # noop

        elif obj.type == 'stop-following':
            # TODO: unify with handle_follow?
            # TODO: handle multiple followees
            if not actor_id or not inner_obj_id:
                error(f'Undo of Follow requires actor id and object id. Got: {actor_id} {inner_obj_id} {obj.as1}')

            # deactivate Follower
            from_ = from_cls.key_for(actor_id)
            to_cls = Protocol.for_id(inner_obj_id)
            to = to_cls.key_for(inner_obj_id)
            follower = Follower.query(Follower.to == to,
                                      Follower.from_ == from_,
                                      Follower.status == 'active').get()
            if follower:
                logger.info(f'Marking {follower} inactive')
                follower.status = 'inactive'
                follower.put()
            else:
                logger.warning(f'No Follower found for {from_} => {to}')

            # fall through to deliver to followee
            # TODO: do we convert stop-following to webmention 410 of original
            # follow?

        elif obj.type in ('update', 'like', 'share'):  # require object
            if not inner_obj_id:
                error("Couldn't find id of object to update")

            # fall through to deliver to followers

        elif obj.type == 'delete':
            if not inner_obj_id:
                error("Couldn't find id of object to delete")

            logger.info(f'Marking Object {inner_obj_id} deleted')
            Object.get_or_create(inner_obj_id, deleted=True)

            # if this is an actor, deactivate its followers/followings
            # https://github.com/snarfed/bridgy-fed/issues/63
            deleted_user = from_cls.key_for(id=inner_obj_id)
            if deleted_user:
                logger.info(f'Deactivating Followers from or to = {inner_obj_id}')
                followers = Follower.query(OR(Follower.to == deleted_user,
                                              Follower.from_ == deleted_user)
                                           ).fetch()
                for f in followers:
                    f.status = 'inactive'
                ndb.put_multi(followers)

            # fall through to deliver to followers

        # fetch actor if necessary so we have name, profile photo, etc
        if actor and actor.keys() == set(['id']):
            logger.info('Fetching actor so we have name, profile photo, etc')
            actor_obj = from_cls.load(actor['id'])
            if actor_obj and actor_obj.as1:
                obj.our_as1 = {**obj.as1, 'actor': actor_obj.as1}

        # fetch object if necessary so we can render it in feeds
        if (obj.type == 'share'
                and inner_obj_as1.keys() == set(['id'])
                and from_cls.owns_id(inner_obj_id)):
            logger.info('Fetching object so we can render it in feeds')
            inner_obj = from_cls.load(inner_obj_id)
            if inner_obj and inner_obj.as1:
                obj.our_as1 = {
                    **obj.as1,
                    'object': {
                        **inner_obj_as1,
                        **inner_obj.as1,
                    }
                }

        if obj.type == 'follow':
            # stores Follower(s) and sends accept(s); delivery of the follow
            # itself happens in deliver below
            from_cls.handle_follow(obj)

        # deliver to targets
        return from_cls.deliver(obj, from_user=from_user)
|
2023-03-08 21:10:41 +00:00
|
|
|
|
2023-03-11 20:58:36 +00:00
|
|
|
@classmethod
|
2023-09-20 04:46:41 +00:00
|
|
|
def handle_follow(from_cls, obj):
|
2023-07-10 19:00:42 +00:00
|
|
|
"""Handles an incoming follow activity.
|
2023-03-11 20:58:36 +00:00
|
|
|
|
2023-10-18 20:51:34 +00:00
|
|
|
Sends an ``Accept`` back, but doesn't send the ``Follow`` itself. That
|
|
|
|
happens in :meth:`deliver`.
|
|
|
|
|
2023-03-11 20:58:36 +00:00
|
|
|
Args:
|
2023-10-06 06:32:31 +00:00
|
|
|
obj (models.Object): follow activity
|
2023-03-11 20:58:36 +00:00
|
|
|
"""
|
2023-07-10 19:00:42 +00:00
|
|
|
logger.info('Got follow. Loading users, storing Follow(s), sending accept(s)')
|
2023-06-27 03:22:06 +00:00
|
|
|
|
2023-07-10 19:00:42 +00:00
|
|
|
# Prepare follower (from) users' data
|
2023-06-27 03:22:06 +00:00
|
|
|
from_as1 = as1.get_object(obj.as1, 'actor')
|
|
|
|
from_id = from_as1.get('id')
|
2023-07-10 19:00:42 +00:00
|
|
|
if not from_id:
|
|
|
|
error(f'Follow activity requires actor. Got: {obj.as1}')
|
2023-03-11 20:58:36 +00:00
|
|
|
|
2023-06-27 03:22:06 +00:00
|
|
|
from_obj = from_cls.load(from_id)
|
2023-07-14 19:45:47 +00:00
|
|
|
if not from_obj:
|
|
|
|
error(f"Couldn't load {from_id}")
|
2023-07-10 19:00:42 +00:00
|
|
|
|
2023-06-27 03:22:06 +00:00
|
|
|
if not from_obj.as1:
|
|
|
|
from_obj.our_as1 = from_as1
|
|
|
|
from_obj.put()
|
|
|
|
|
2023-06-27 19:07:08 +00:00
|
|
|
from_target = from_cls.target_for(from_obj)
|
|
|
|
if not from_target:
|
2023-06-27 03:22:06 +00:00
|
|
|
error(f"Couldn't find delivery target for follower {from_obj}")
|
|
|
|
|
|
|
|
from_key = from_cls.key_for(from_id)
|
2023-07-23 06:32:55 +00:00
|
|
|
if not from_key:
|
|
|
|
error(f'Invalid {from_cls} user key: {from_id}')
|
2023-07-10 19:00:42 +00:00
|
|
|
obj.users = [from_key]
|
|
|
|
|
|
|
|
# Prepare followee (to) users' data
|
|
|
|
to_as1s = as1.get_objects(obj.as1)
|
|
|
|
if not to_as1s:
|
|
|
|
error(f'Follow activity requires object(s). Got: {obj.as1}')
|
|
|
|
|
|
|
|
# Store Followers
|
|
|
|
for to_as1 in to_as1s:
|
|
|
|
to_id = to_as1.get('id')
|
|
|
|
if not to_id or not from_id:
|
|
|
|
error(f'Follow activity requires object(s). Got: {obj.as1}')
|
2023-10-18 20:51:34 +00:00
|
|
|
from_user = from_cls.get_or_create(id=from_key.id(), obj=from_obj)
|
2023-07-10 19:00:42 +00:00
|
|
|
|
2023-10-18 19:14:18 +00:00
|
|
|
logger.info(f'Follow {from_id} => {to_id}')
|
2023-07-10 19:00:42 +00:00
|
|
|
to_cls = Protocol.for_id(to_id)
|
2023-07-25 16:53:23 +00:00
|
|
|
if not to_cls:
|
|
|
|
error(f"Couldn't determine protocol for {to_id}")
|
|
|
|
elif from_cls == to_cls and from_cls.LABEL != 'fake':
|
2023-07-13 21:19:01 +00:00
|
|
|
logger.info(f'Skipping same-protocol Follower {from_id} => {to_id}')
|
|
|
|
continue
|
|
|
|
|
2023-07-10 19:00:42 +00:00
|
|
|
to_obj = to_cls.load(to_id)
|
2023-07-14 19:45:47 +00:00
|
|
|
if to_obj and not to_obj.as1:
|
2023-07-10 19:00:42 +00:00
|
|
|
to_obj.our_as1 = to_as1
|
|
|
|
to_obj.put()
|
|
|
|
|
|
|
|
# If followee user is already direct, follower may not know they're
|
2023-07-16 21:06:03 +00:00
|
|
|
# interacting with a bridge. if followee user is indirect though,
|
|
|
|
# follower should know, so they're direct.
|
2023-07-10 19:00:42 +00:00
|
|
|
to_key = to_cls.key_for(to_id)
|
2023-07-23 06:32:55 +00:00
|
|
|
if not to_key:
|
|
|
|
logger.info(f'Skipping invalid {from_cls} user key: {from_id}')
|
|
|
|
continue
|
|
|
|
|
2023-07-10 19:00:42 +00:00
|
|
|
to_user = to_cls.get_or_create(id=to_key.id(), obj=to_obj, direct=False)
|
|
|
|
|
|
|
|
# HACK: we rewrite direct here for each followee, so the last one
|
|
|
|
# wins. Could we do something better?
|
|
|
|
from_user = from_cls.get_or_create(id=from_key.id(), obj=from_obj,
|
|
|
|
direct=not to_user.direct)
|
|
|
|
follower_obj = Follower.get_or_create(to=to_user, from_=from_user,
|
|
|
|
follow=obj.key, status='active')
|
2023-10-07 20:51:59 +00:00
|
|
|
obj.add('notify', to_key)
|
2023-07-10 19:00:42 +00:00
|
|
|
|
2023-11-27 22:44:05 +00:00
|
|
|
if not to_user.HAS_FOLLOW_ACCEPTS:
|
|
|
|
# send accept. note that this is one accept for the whole
|
|
|
|
# follow, even if it has multiple followees!
|
2023-12-01 00:31:41 +00:00
|
|
|
id = to_user.id_as('activitypub') + f'/followers#accept-{obj.key.id()}'
|
2023-11-27 22:44:05 +00:00
|
|
|
accept = Object.get_or_create(id, our_as1={
|
|
|
|
'id': id,
|
|
|
|
'objectType': 'activity',
|
|
|
|
'verb': 'accept',
|
|
|
|
'actor': to_id,
|
|
|
|
'object': obj.as1,
|
|
|
|
})
|
|
|
|
|
|
|
|
sent = from_cls.send(accept, from_target, from_user=to_user)
|
|
|
|
if sent:
|
|
|
|
accept.populate(
|
|
|
|
delivered=[Target(protocol=from_cls.LABEL, uri=from_target)],
|
|
|
|
status='complete',
|
|
|
|
)
|
|
|
|
accept.put()
|
2023-03-11 20:58:36 +00:00
|
|
|
|
2023-03-21 02:17:55 +00:00
|
|
|
@classmethod
|
2023-09-20 04:46:41 +00:00
|
|
|
def handle_bare_object(cls, obj):
|
2023-07-01 15:03:47 +00:00
|
|
|
"""If obj is a bare object, wraps it in a create or update activity.
|
|
|
|
|
|
|
|
Checks if we've seen it before.
|
2023-03-21 02:17:55 +00:00
|
|
|
|
|
|
|
Args:
|
2023-10-06 06:32:31 +00:00
|
|
|
obj (models.Object)
|
2023-07-01 15:03:47 +00:00
|
|
|
|
|
|
|
Returns:
|
2023-10-06 15:22:50 +00:00
|
|
|
models.Object: ``obj`` if it's an activity, otherwise a new object
|
2023-03-21 02:17:55 +00:00
|
|
|
"""
|
2023-07-01 15:03:47 +00:00
|
|
|
if obj.type not in ('note', 'article', 'comment'):
|
2023-07-02 05:40:42 +00:00
|
|
|
return obj
|
2023-07-01 13:48:51 +00:00
|
|
|
|
2023-07-14 19:45:47 +00:00
|
|
|
obj_actor = as1.get_owner(obj.as1)
|
|
|
|
now = util.now().isoformat()
|
|
|
|
|
2023-07-01 15:03:47 +00:00
|
|
|
# this is a raw post; wrap it in a create or update activity
|
|
|
|
if obj.changed:
|
|
|
|
logger.info(f'Content has changed from last time at {obj.updated}! Redelivering to all inboxes')
|
|
|
|
id = f'{obj.key.id()}#bridgy-fed-update-{now}'
|
|
|
|
update_as1 = {
|
|
|
|
'objectType': 'activity',
|
|
|
|
'verb': 'update',
|
|
|
|
'id': id,
|
|
|
|
'actor': obj_actor,
|
|
|
|
'object': {
|
|
|
|
# Mastodon requires the updated field for Updates, so
|
|
|
|
# add a default value.
|
|
|
|
# https://docs.joinmastodon.org/spec/activitypub/#supported-activities-for-statuses
|
|
|
|
# https://socialhub.activitypub.rocks/t/what-could-be-the-reason-that-my-update-activity-does-not-work/2893/4
|
|
|
|
# https://github.com/mastodon/documentation/pull/1150
|
|
|
|
'updated': now,
|
|
|
|
**obj.as1,
|
|
|
|
},
|
|
|
|
}
|
2023-07-09 14:53:33 +00:00
|
|
|
logger.info(f'Wrapping in update: {json_dumps(update_as1, indent=2)}')
|
2023-07-24 21:07:44 +00:00
|
|
|
return Object(id=id, our_as1=update_as1,
|
|
|
|
source_protocol=obj.source_protocol)
|
|
|
|
|
|
|
|
create_id = f'{obj.key.id()}#bridgy-fed-create'
|
|
|
|
create = cls.load(create_id, remote=False)
|
|
|
|
if (obj.new or not create or create.status != 'complete'
|
|
|
|
# HACK: force query param here is specific to webmention
|
|
|
|
or 'force' in request.form):
|
|
|
|
if create:
|
|
|
|
logger.info(f'Existing create {create.key} status {create.status}')
|
|
|
|
else:
|
|
|
|
logger.info(f'No existing create activity')
|
2023-07-01 15:03:47 +00:00
|
|
|
create_as1 = {
|
|
|
|
'objectType': 'activity',
|
|
|
|
'verb': 'post',
|
2023-07-24 21:07:44 +00:00
|
|
|
'id': create_id,
|
2023-07-01 15:03:47 +00:00
|
|
|
'actor': obj_actor,
|
|
|
|
'object': obj.as1,
|
|
|
|
'published': now,
|
|
|
|
}
|
2023-07-09 14:53:33 +00:00
|
|
|
logger.info(f'Wrapping in post: {json_dumps(create_as1, indent=2)}')
|
2023-07-24 21:07:44 +00:00
|
|
|
return Object.get_or_create(create_id, our_as1=create_as1,
|
|
|
|
source_protocol=obj.source_protocol)
|
2023-07-01 15:03:47 +00:00
|
|
|
|
2023-07-24 21:07:44 +00:00
|
|
|
error(f'{obj.key.id()} is unchanged, nothing to do', status=204)
|
2023-07-01 15:03:47 +00:00
|
|
|
|
|
|
|
@classmethod
|
2023-11-26 04:07:14 +00:00
|
|
|
def deliver(from_cls, obj, from_user):
|
2023-07-01 15:03:47 +00:00
|
|
|
"""Delivers an activity to its external recipients.
|
|
|
|
|
|
|
|
Args:
|
2023-10-06 06:32:31 +00:00
|
|
|
obj (models.Object): activity to deliver
|
2023-11-26 04:07:14 +00:00
|
|
|
from_user (models.User): user (actor) this activity is from
|
2023-07-01 15:03:47 +00:00
|
|
|
"""
|
2023-07-01 13:48:51 +00:00
|
|
|
# find delivery targets
|
2023-09-20 04:46:41 +00:00
|
|
|
targets = from_cls.targets(obj) # maps Target to Object or None
|
2023-07-01 13:48:51 +00:00
|
|
|
|
|
|
|
if not targets:
|
|
|
|
obj.status = 'ignored'
|
|
|
|
obj.put()
|
2023-11-12 21:21:35 +00:00
|
|
|
error(r'No targets, nothing to do ¯\_(ツ)_/¯', status=204)
|
2023-07-01 13:48:51 +00:00
|
|
|
|
2023-11-13 22:07:56 +00:00
|
|
|
# sort targets so order is deterministic for tests, debugging, etc
|
2023-07-01 13:48:51 +00:00
|
|
|
sorted_targets = sorted(targets.items(), key=lambda t: t[0].uri)
|
|
|
|
obj.populate(
|
|
|
|
status='in progress',
|
|
|
|
delivered=[],
|
|
|
|
failed=[],
|
|
|
|
undelivered=[t for t, _ in sorted_targets],
|
|
|
|
)
|
2023-10-31 19:49:15 +00:00
|
|
|
obj.put()
|
2023-07-01 13:48:51 +00:00
|
|
|
logger.info(f'Delivering to: {obj.undelivered}')
|
|
|
|
|
2023-10-31 19:49:15 +00:00
|
|
|
# enqueue send task for each targets
|
2023-11-26 04:07:14 +00:00
|
|
|
user = from_user.key.urlsafe()
|
2023-10-31 19:49:15 +00:00
|
|
|
for i, (target, orig_obj) in enumerate(sorted_targets):
|
|
|
|
orig_obj = orig_obj.key.urlsafe() if orig_obj else ''
|
|
|
|
common.create_task(queue='send', obj=obj.key.urlsafe(),
|
|
|
|
url=target.uri, protocol=target.protocol,
|
|
|
|
orig_obj=orig_obj, user=user)
|
2023-07-01 13:48:51 +00:00
|
|
|
|
2023-10-31 19:49:15 +00:00
|
|
|
return 'OK', 202
|
2023-07-01 13:48:51 +00:00
|
|
|
|
2023-07-03 15:05:29 +00:00
|
|
|
    @classmethod
    def targets(cls, obj):
        """Collects the targets to send a :class:`models.Object` to.

        Targets are both objects - original posts, events, etc - and actors.

        Phases: (1) resolve each raw target URI from the activity into a
        protocol-specific delivery target, (2) optionally fan out to the
        author's followers, (3) de-dupe and drop same-domain targets.

        Args:
          obj (models.Object)

        Returns:
          dict: maps :class:`models.Target` to original (in response to)
          :class:`models.Object`, if any, otherwise None
        """
        logger.info('Finding recipients and their targets')

        target_uris = sorted(set(as1.targets(obj.as1)))
        logger.info(f'Raw targets: {target_uris}')
        orig_obj = None
        targets = {}  # maps Target to Object or None
        owner = as1.get_owner(obj.as1)

        # collect the authors of any posts this activity replies to, so we can
        # skip redundant mention targets for them below
        in_reply_to_owners = []
        in_reply_tos = as1.get_ids(as1.get_object(obj.as1), 'inReplyTo')
        for in_reply_to in in_reply_tos:
            if protocol := Protocol.for_id(in_reply_to):
                if in_reply_to_obj := protocol.load(in_reply_to):
                    if reply_owner := as1.get_owner(in_reply_to_obj.as1):
                        in_reply_to_owners.append(reply_owner)

        is_self_reply = False

        # phase 1: resolve each raw target URI
        for id in sorted(target_uris):
            protocol = Protocol.for_id(id)
            if not protocol:
                logger.info(f"Can't determine protocol for {id}")
                continue
            elif protocol == cls and cls.LABEL != 'fake':
                logger.info(f'Skipping same-protocol target {id}')
                continue
            elif protocol.is_blocklisted(id):
                logger.info(f'{id} is blocklisted')
                continue
            elif id in in_reply_to_owners:
                logger.info(f'Skipping mention of in-reply-to author')
                continue

            orig_obj = protocol.load(id)
            if not orig_obj or not orig_obj.as1:
                logger.info(f"Couldn't load {id}")
                continue

            # deliver self-replies to followers
            # https://github.com/snarfed/bridgy-fed/issues/639
            if owner == as1.get_owner(orig_obj.as1):
                is_self_reply = True
                logger.info(f'Looks like a self reply! Delivering to all followers')

            target = protocol.target_for(orig_obj)
            if not target:
                # TODO: surface errors like this somehow?
                logger.error(f"Can't find delivery target for {id}")
                continue

            logger.info(f'Target for {id} is {target}')
            targets[Target(protocol=protocol.LABEL, uri=target)] = orig_obj
            orig_user = protocol.actor_key(orig_obj)
            if orig_user:
                logger.info(f'Recipient is {orig_user}')
                obj.add('notify', orig_user)

        logger.info(f'Direct targets: {targets.keys()}')

        # phase 2: deliver to followers, if appropriate
        user_key = cls.actor_key(obj)
        if not user_key:
            logger.info("Can't tell who this is from! Skipping followers.")
            return targets

        is_reply = obj.type == 'comment' or in_reply_tos
        if (obj.type in ('post', 'update', 'delete', 'share')
                and (is_self_reply or not is_reply)):
            logger.info(f'Delivering to followers of {user_key}')
            followers = Follower.query(Follower.to == user_key,
                                       Follower.status == 'active'
                                       ).fetch()
            users = [u for u in ndb.get_multi(f.from_ for f in followers) if u]
            User.load_multi(users)

            # which object should we add to followers' feeds, if any
            feed_obj = None
            if obj.type == 'share':
                feed_obj = obj
            else:
                inner = as1.get_object(obj.as1)
                # don't add profile updates to feeds
                if not (obj.type == 'update'
                        and inner.get('objectType') in as1.ACTOR_TYPES):
                    inner_id = inner.get('id')
                    if inner_id:
                        feed_obj = cls.load(inner_id)

            for user in users:
                if feed_obj:
                    feed_obj.add('feed', user.key)

                # TODO: should we pass remote=False through here to Protocol.load?
                target = user.target_for(user.obj, shared=True) if user.obj else None
                if not target:
                    # TODO: surface errors like this somehow?
                    logger.error(f'Follower {user.key} has no delivery target')
                    continue

                # normalize URL (lower case hostname, etc)
                target = util.dedupe_urls([target])[0]

                # HACK: use last target object from above for reposts, which
                # has its resolved id
                targets[Target(protocol=user.LABEL, uri=target)] = \
                    orig_obj if obj.as1.get('verb') == 'share' else None

            if feed_obj:
                feed_obj.put()

        # phase 3: de-dupe targets, discard same-domain
        candidates = {t.uri: (t, obj) for t, obj in targets.items()}
        targets = {}
        source_domains = [
            util.domain_from_link(url) for url in
            (obj.as1.get('id'), obj.as1.get('url'), as1.get_owner(obj.as1))
            if util.is_web(url)
        ]
        for url in sorted(util.dedupe_urls(candidates.keys())):
            if util.is_web(url) and util.domain_from_link(url) in source_domains:
                logger.info(f'Skipping same-domain target {url}')
            else:
                target, obj = candidates[url]
                targets[target] = obj

        return targets
|
2023-03-21 02:17:55 +00:00
|
|
|
|
2023-03-08 21:10:41 +00:00
|
|
|
    @classmethod
    def load(cls, id, remote=None, local=True, **kwargs):
        """Loads and returns an Object from memory cache, datastore, or HTTP fetch.

        Sets the :attr:`new` and :attr:`changed` attributes if we know either
        one for the loaded object, ie local is True and remote is True or None.

        Note that :meth:`Object._post_put_hook` updates the cache.

        Args:
          id (str)
          remote (bool): whether to fetch the object over the network. If True,
            fetches even if we already have the object stored, and updates our
            stored copy. If False and we don't have the object stored, returns
            None. Default (None) means to fetch over the network only if we
            don't already have it stored.
          local (bool): whether to load from the datastore before
            fetching over the network. If False, still stores back to the
            datastore after a successful remote fetch.
          kwargs: passed through to :meth:`fetch()`

        Returns:
          models.Object: loaded object, or None if it isn't fetchable, eg a
          non-URL string for Web, or ``remote`` is False and it isn't in the
          cache or datastore

        Raises:
          requests.HTTPError: anything that :meth:`fetch` raises
        """
        # remote=False only makes sense if we're allowed to read locally
        assert local or remote is not False

        if remote is not True:
            # cache check first, unless a forced refresh was requested
            with objects_cache_lock:
                cached = objects_cache.get(id)
                if cached:
                    # make a copy so that if the client modifies this entity in
                    # memory, those modifications aren't applied to the cache
                    # until they explicitly put() the modified entity.
                    # NOTE: keep in sync with Object._post_put_hook!
                    return Object(id=cached.key.id(), **cached.to_dict(
                        # computed properties
                        exclude=['as1', 'expire', 'object_ids', 'type']))

        obj = orig_as1 = None
        if local:
            obj = Object.get_by_id(id)
            # only count it as a hit if it has actual content (or is a
            # tombstone); an empty shell falls through to a fetch
            if obj and (obj.as1 or obj.raw or obj.deleted):
                logger.info(' got from datastore')
                obj.new = False
                # remember pre-fetch AS1 so we can detect changes after fetch
                orig_as1 = obj.as1
                if remote is not True:
                    with objects_cache_lock:
                        objects_cache[id] = obj
                    return obj

        if remote is True:
            logger.debug(f'Loading Object {id} local={local} remote={remote}, forced refresh requested')
        elif remote is False:
            logger.debug(f'Loading Object {id} local={local} remote={remote} {"empty" if obj else "not"} in datastore')
            return obj

        if obj:
            obj.new = False
        else:
            obj = Object(id=id)
            if local:
                logger.info(' not in datastore')
                obj.new = True
                obj.changed = False

        # network fetch; populates obj in place
        fetched = cls.fetch(obj, **kwargs)
        if not fetched:
            return None

        obj.resolve_ids()

        # compare against the pre-fetch copy to set changed
        if obj.new is False:
            obj.changed = obj.activity_changed(orig_as1)

        if obj.source_protocol not in (cls.LABEL, cls.ABBREV):
            assert not obj.source_protocol
            obj.source_protocol = cls.LABEL
            obj.put()

        with objects_cache_lock:
            objects_cache[id] = obj
        return obj
|
2023-09-19 02:19:59 +00:00
|
|
|
|
|
|
|
|
2023-09-29 20:49:17 +00:00
|
|
|
@app.post('/queue/receive')
@cloud_tasks_only
def receive_task():
    """Task handler for a newly received :class:`models.Object`.

    Calls :meth:`Protocol.receive` with the form parameters.

    Parameters:
      obj (url-safe google.cloud.ndb.key.Key): :class:`models.Object` to handle
      authed_as (str): passed to :meth:`Protocol.receive`

    TODO: migrate incoming webmentions and AP inbox deliveries to this. The
    difficulty is that parts of :meth:`protocol.Protocol.receive` depend on
    setup in :func:`web.webmention` and :func:`activitypub.inbox`, eg
    :class:`models.Object` with ``new`` and ``changed``, HTTP request details,
    etc. See stash for attempt at this for :class:`web.Web`.
    """
    params = request.form.to_dict()
    logger.info(f'Params: {list(params.items())}')

    obj = ndb.Key(urlsafe=params['obj']).get()
    assert obj
    obj.new = True

    try:
        return PROTOCOLS[obj.source_protocol].receive(
            obj=obj, authed_as=params.get('authed_as'))
    except ValueError as e:
        logger.warning(e, exc_info=True)
        error(e, status=304)
|
2023-10-31 19:49:15 +00:00
|
|
|
|
|
|
|
|
|
|
|
@app.post('/queue/send')
@cloud_tasks_only
def send_task():
    """Task handler for sending an activity to a single specific destination.

    Calls :meth:`Protocol.send` with the form parameters.

    Parameters:
      protocol (str): :class:`Protocol` to send to
      url (str): destination URL to send to
      obj (url-safe google.cloud.ndb.key.Key): :class:`models.Object` to send
      orig_obj (url-safe google.cloud.ndb.key.Key): optional "original object"
        :class:`models.Object` that this object refers to, eg replies to or
        reposts or likes
      user (url-safe google.cloud.ndb.key.Key): :class:`models.User` (actor)
        this activity is from
    """
    form = request.form.to_dict()
    logger.info(f'Params: {list(form.items())}')

    # prepare
    url = form['url']
    protocol = form['protocol']
    target = Target(uri=url, protocol=protocol)

    obj = ndb.Key(urlsafe=form['obj']).get()
    logging.info(f'Sending {obj.key.id()} AS1: {json_dumps(obj.as1, indent=2)}')

    # skip targets we've already handled, unless the caller forces a resend
    if (target not in obj.undelivered and target not in obj.failed
            and 'force' not in request.values):
        logger.info(f"{url} not in {obj.key.id()} undelivered or failed, giving up")
        return r'¯\_(ツ)_/¯', 204

    user = None
    if user_key := form.get('user'):
        user = ndb.Key(urlsafe=user_key).get()
    orig_obj = (ndb.Key(urlsafe=form['orig_obj']).get()
                if form.get('orig_obj') else None)

    # send. sent is tri-state: truthy = delivered, falsy (non-None) = skipped,
    # None = the send raised (see the status bookkeeping below)
    sent = None
    try:
        sent = PROTOCOLS[protocol].send(obj, url, from_user=user, orig_obj=orig_obj)
    except BaseException as e:
        code, body = util.interpret_http_exception(e)
        # only log unrecognized (non-HTTP) exceptions; HTTP errors are
        # already summarized by interpret_http_exception
        if not code and not body:
            logger.info(str(e), exc_info=True)

    # write results to Object, transactionally since other send tasks for the
    # same Object may be updating it concurrently
    @ndb.transactional()
    def update_object(obj_key):
        obj = obj_key.get()
        if target in obj.undelivered:
            obj.remove('undelivered', target)

        if sent is None:
            obj.add('failed', target)
        else:
            if target in obj.failed:
                obj.remove('failed', target)
            if sent:
                obj.add('delivered', target)

        # once the last target is resolved, finalize overall status
        if not obj.undelivered:
            obj.status = ('complete' if obj.delivered
                          else 'failed' if obj.failed
                          else 'ignored')
        obj.put()

    update_object(obj.key)

    return '', 200 if sent else 304
|