2023-03-20 21:28:14 +00:00
|
|
|
"""Handles inbound webmentions."""
|
2023-05-31 00:24:49 +00:00
|
|
|
import datetime
|
2023-05-26 23:07:36 +00:00
|
|
|
import difflib
|
2017-08-15 14:39:22 +00:00
|
|
|
import logging
|
2023-06-09 17:58:28 +00:00
|
|
|
import re
|
2023-06-08 18:04:11 +00:00
|
|
|
import urllib.parse
|
2023-05-26 23:07:36 +00:00
|
|
|
from urllib.parse import urlencode, urljoin, urlparse
|
2017-08-15 14:39:22 +00:00
|
|
|
|
2023-05-31 00:24:49 +00:00
|
|
|
from flask import g, redirect, render_template, request
|
2023-06-14 21:35:08 +00:00
|
|
|
from google.cloud import ndb
|
2023-06-20 18:22:54 +00:00
|
|
|
from google.cloud.ndb import ComputedProperty
|
2023-02-14 15:40:37 +00:00
|
|
|
from granary import as1, as2, microformats2
|
2017-08-15 14:39:22 +00:00
|
|
|
import mf2util
|
2021-07-18 04:22:13 +00:00
|
|
|
from oauth_dropins.webutil import flask_util, util
|
2023-01-05 04:48:39 +00:00
|
|
|
from oauth_dropins.webutil.appengine_config import tasks_client
|
|
|
|
from oauth_dropins.webutil.appengine_info import APP_ID
|
2023-01-24 02:57:49 +00:00
|
|
|
from oauth_dropins.webutil.flask_util import error, flash
|
2019-12-25 07:26:58 +00:00
|
|
|
from oauth_dropins.webutil.util import json_dumps, json_loads
|
2023-05-27 00:40:29 +00:00
|
|
|
from oauth_dropins.webutil import webmention
|
2023-04-17 22:36:29 +00:00
|
|
|
from requests import HTTPError, RequestException, URLRequired
|
2023-05-26 23:07:36 +00:00
|
|
|
from werkzeug.exceptions import BadGateway, BadRequest, HTTPException, NotFound
|
2017-08-15 14:39:22 +00:00
|
|
|
|
|
|
|
import common
|
2023-05-31 00:24:49 +00:00
|
|
|
from flask_app import app, cache
|
2023-05-26 23:07:36 +00:00
|
|
|
from models import Follower, Object, PROTOCOLS, Target, User
|
|
|
|
from protocol import Protocol
|
2017-08-15 14:39:22 +00:00
|
|
|
|
2022-02-12 06:38:56 +00:00
|
|
|
logger = logging.getLogger(__name__)

# Cloud Tasks queue location for the inbound webmention task queue.
# https://cloud.google.com/appengine/docs/locations
TASKS_LOCATION = 'us-central1'

# First character after space in ASCII order.
# NOTE(review): not used in this chunk; presumably for lexicographic range
# queries elsewhere — confirm before changing.
CHAR_AFTER_SPACE = chr(ord(' ') + 1)

# www. domains that should *not* be collapsed to their root domain.
# https://github.com/snarfed/bridgy-fed/issues/314
WWW_DOMAINS = frozenset((
    'www.jvt.me',
))

# file extensions that match DOMAIN_RE's TLD position but aren't real TLDs
NON_TLDS = frozenset(('html', 'json', 'php', 'xml'))
|
2017-08-15 14:39:22 +00:00
|
|
|
|
2023-05-26 23:07:36 +00:00
|
|
|
|
2023-05-27 00:40:29 +00:00
|
|
|
class Web(User, Protocol):
    """Web user and webmention protocol implementation.

    The key name is the domain.
    """
    ABBREV = 'web'
    OTHER_LABELS = ('webmention',)

    # whether the domain serves the expected webfinger redirects to us;
    # set by verify()
    has_redirects = ndb.BooleanProperty()
    # HTML description of the redirect check failure, if any; set by verify()
    redirects_error = ndb.TextProperty()
    # whether the home page has a representative h-card; set by verify()
    has_hcard = ndb.BooleanProperty()
|
|
|
|
|
2023-05-26 23:07:36 +00:00
|
|
|
@classmethod
|
|
|
|
def _get_kind(cls):
|
|
|
|
return 'MagicKey'
|
|
|
|
|
2023-06-04 04:48:15 +00:00
|
|
|
@ComputedProperty
|
|
|
|
def readable_id(self):
|
2023-06-02 04:37:58 +00:00
|
|
|
# prettify if domain, noop if username
|
2023-06-04 04:48:15 +00:00
|
|
|
username = self.username()
|
|
|
|
if username != self.key.id():
|
|
|
|
return util.domain_from_link(username, minimize=False)
|
2023-06-02 04:37:58 +00:00
|
|
|
|
2023-06-23 19:22:37 +00:00
|
|
|
    def _pre_put_hook(self):
        """Validate domain id, don't allow upper case or invalid characters."""
        super()._pre_put_hook()
        # key id must be a bare, lower-cased domain that isn't one of ours.
        # NOTE(review): asserts are stripped under `python -O`; confirm these
        # invariants are also enforced upstream if that mode is ever used.
        id = self.key.id()
        assert re.match(common.DOMAIN_RE, id)
        assert id.lower() == id, f'upper case is not allowed in Web key id: {id}'
        assert id not in common.DOMAINS, f'{id} is a Bridgy Fed domain'
|
2023-06-09 17:58:28 +00:00
|
|
|
|
|
|
|
@classmethod
|
|
|
|
def get_or_create(cls, id, **kwargs):
|
|
|
|
"""Lower cases id (domain), then passes through to :meth:`User.get_or_create`."""
|
|
|
|
return super().get_or_create(id.lower(), **kwargs)
|
|
|
|
|
2023-06-01 01:34:33 +00:00
|
|
|
def web_url(self):
|
|
|
|
"""Returns this user's web URL aka web_url, eg 'https://foo.com/'."""
|
|
|
|
return f'https://{self.key.id()}/'
|
|
|
|
|
2023-05-31 17:47:09 +00:00
|
|
|
def ap_address(self):
|
|
|
|
"""Returns this user's ActivityPub address, eg '@foo.com@foo.com'.
|
|
|
|
|
|
|
|
Uses the user's domain if they're direct, fed.brid.gy if they're not.
|
|
|
|
"""
|
|
|
|
if self.direct:
|
|
|
|
return f'@{self.username()}@{self.key.id()}'
|
|
|
|
else:
|
|
|
|
return f'@{self.key.id()}@{request.host}'
|
|
|
|
|
|
|
|
def ap_actor(self, rest=None):
|
|
|
|
"""Returns this user's ActivityPub/AS2 actor id.
|
|
|
|
|
|
|
|
Eg 'https://fed.brid.gy/foo.com'
|
|
|
|
|
|
|
|
Web users are special cased to not have an /ap/web/ prefix, for backward
|
|
|
|
compatibility.
|
|
|
|
"""
|
2023-06-02 19:55:07 +00:00
|
|
|
url = common.host_url(self.key.id())
|
|
|
|
if rest:
|
|
|
|
url += f'/{rest}'
|
|
|
|
return url
|
2023-05-31 17:47:09 +00:00
|
|
|
|
2023-06-07 18:51:31 +00:00
|
|
|
def user_page_path(self, rest=None):
|
|
|
|
"""Always use domain."""
|
2023-06-11 15:14:17 +00:00
|
|
|
path = f'/{self.ABBREV}/{self.key.id()}'
|
2023-06-07 18:51:31 +00:00
|
|
|
|
|
|
|
if rest:
|
|
|
|
if not rest.startswith('?'):
|
|
|
|
path += '/'
|
|
|
|
path += rest
|
|
|
|
|
|
|
|
return path
|
|
|
|
|
2023-06-04 23:10:37 +00:00
|
|
|
def username(self):
|
|
|
|
"""Returns the user's preferred username.
|
|
|
|
|
|
|
|
Uses stored representative h-card if available, falls back to id.
|
|
|
|
|
|
|
|
Returns: str
|
|
|
|
"""
|
|
|
|
id = self.key.id()
|
|
|
|
|
2023-06-16 04:22:20 +00:00
|
|
|
if self.obj and self.obj.as1 and self.direct:
|
|
|
|
for url in (util.get_list(self.obj.as1, 'url') +
|
|
|
|
util.get_list(self.obj.as1, 'urls')):
|
|
|
|
url = url.get('value') if isinstance(url, dict) else url
|
2023-06-04 23:10:37 +00:00
|
|
|
if url and url.startswith('acct:'):
|
2023-06-14 20:34:29 +00:00
|
|
|
try:
|
|
|
|
urluser, urldomain = util.parse_acct_uri(url)
|
2023-06-27 03:22:06 +00:00
|
|
|
except ValueError as e:
|
2023-06-14 20:34:29 +00:00
|
|
|
continue
|
2023-06-04 23:10:37 +00:00
|
|
|
if urldomain == id:
|
|
|
|
logger.info(f'Found custom username: {urluser}')
|
|
|
|
return urluser
|
|
|
|
|
|
|
|
logger.info(f'Defaulting username to key id {id}')
|
|
|
|
return id
|
|
|
|
|
2023-05-26 23:07:36 +00:00
|
|
|
    def verify(self):
        """Fetches site a couple ways to check for redirects and h-card.

        Checks, in order: whether a www. domain should be replaced by its
        root domain, whether the site's webfinger endpoint redirects to us,
        and whether the home page has a representative h-card. Stores the
        results on this entity and puts it.

        Returns: :class:`Web` that was verified. May be different than
          self! eg if self's domain started with www and we switch to the root
          domain.
        """
        domain = self.key.id()
        logger.info(f'Verifying {domain}')

        if domain.startswith('www.') and domain not in WWW_DOMAINS:
            # if root domain redirects to www, use root domain instead
            # https://github.com/snarfed/bridgy-fed/issues/314
            root = domain.removeprefix("www.")
            root_site = f'https://{root}/'
            try:
                resp = util.requests_get(root_site, gateway=False)
                if resp.ok and self.is_web_url(resp.url):
                    logger.info(f'{root_site} redirects to {resp.url} ; using {root} instead')
                    root_user = Web.get_or_create(root)
                    # point this entity at the root-domain user and recurse
                    self.use_instead = root_user.key
                    self.put()
                    return root_user.verify()
            except RequestException:
                # best effort; fall through and verify the www domain itself
                pass

        # check webfinger redirect
        path = f'/.well-known/webfinger?resource=acct:{domain}@{domain}'
        self.has_redirects = False
        self.redirects_error = None
        try:
            url = urljoin(self.web_url(), path)
            resp = util.requests_get(url, gateway=False)
            # the redirect may land on any of our serving domains
            domain_urls = ([f'https://{domain}/' for domain in common.DOMAINS] +
                           [common.host_url()])
            expected = [urljoin(url, path) for url in domain_urls]
            if resp.ok and resp.url:
                # unquote since the redirect target may be URL-escaped
                got = urllib.parse.unquote(resp.url)
                if got in expected:
                    self.has_redirects = True
                elif got:
                    # show the user a diff of actual vs expected redirect
                    diff = '\n'.join(difflib.Differ().compare([got], [expected[0]]))
                    self.redirects_error = f'Current vs expected:<pre>{diff}</pre>'
            else:
                lines = [url, f' returned HTTP {resp.status_code}']
                if resp.url and resp.url != url:
                    lines[1:1] = [' redirected to:', resp.url]
                self.redirects_error = '<pre>' + '\n'.join(lines) + '</pre>'
        except RequestException:
            # network failure: leave has_redirects False, no error detail
            pass

        # check home page
        try:
            self.obj = Web.load(self.web_url(), remote=True, gateway=True)
            self.has_hcard = True
        except (BadRequest, NotFound, common.NoMicroformats):
            self.obj = None
            self.has_hcard = False

        self.put()
        return self
|
|
|
|
|
2023-06-13 21:30:00 +00:00
|
|
|
@classmethod
|
|
|
|
def key_for(cls, id):
|
|
|
|
"""Returns the :class:`ndb.Key` for a given id.
|
|
|
|
|
|
|
|
If id is a domain, uses it as is. If it's a home page URL or fed.brid.gy
|
|
|
|
or web.brid.gy AP actor URL, extracts the domain and uses that.
|
|
|
|
Otherwise, raises AssertionError.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
id: str
|
2023-06-15 17:52:11 +00:00
|
|
|
|
|
|
|
Raises:
|
|
|
|
ValueError
|
2023-06-13 21:30:00 +00:00
|
|
|
"""
|
2023-06-15 17:52:11 +00:00
|
|
|
if not id:
|
|
|
|
raise ValueError()
|
2023-06-13 21:30:00 +00:00
|
|
|
|
2023-06-14 20:46:13 +00:00
|
|
|
if util.is_web(id):
|
2023-06-13 21:30:00 +00:00
|
|
|
parsed = urlparse(id)
|
|
|
|
if parsed.path in ('', '/'):
|
2023-06-14 20:46:13 +00:00
|
|
|
id = parsed.netloc
|
|
|
|
|
|
|
|
if re.match(common.DOMAIN_RE, id):
|
2023-06-15 17:52:11 +00:00
|
|
|
tld = id.split('.')[-1]
|
|
|
|
if tld in NON_TLDS:
|
|
|
|
raise ValueError(f"{id} looks like a domain but {tld} isn't a TLD")
|
2023-06-14 20:46:13 +00:00
|
|
|
return cls(id=id).key
|
2023-06-13 21:30:00 +00:00
|
|
|
|
2023-06-15 17:52:11 +00:00
|
|
|
raise ValueError(f'{id} is not a domain or usable home page URL')
|
2023-06-13 21:30:00 +00:00
|
|
|
|
2023-06-13 20:17:11 +00:00
|
|
|
@classmethod
|
|
|
|
def owns_id(cls, id):
|
2023-06-15 17:52:11 +00:00
|
|
|
"""Returns None if id is a domain or http(s) URL, False otherwise.
|
2023-06-13 20:17:11 +00:00
|
|
|
|
|
|
|
All web pages are http(s) URLs, but not all http(s) URLs are web pages.
|
|
|
|
"""
|
2023-06-14 20:46:13 +00:00
|
|
|
if not id:
|
|
|
|
return False
|
|
|
|
|
|
|
|
try:
|
|
|
|
key = cls.key_for(id)
|
|
|
|
if key:
|
|
|
|
user = key.get()
|
|
|
|
return True if user and user.has_redirects else None
|
2023-06-15 17:52:11 +00:00
|
|
|
except ValueError as e:
|
|
|
|
logger.info(e)
|
2023-06-14 20:46:13 +00:00
|
|
|
|
2023-06-13 20:17:11 +00:00
|
|
|
return None if util.is_web(id) else False
|
|
|
|
|
2023-06-16 20:16:17 +00:00
|
|
|
@classmethod
|
|
|
|
def target_for(cls, obj, shared=False):
|
|
|
|
"""Returns `obj`'s id, as a URL webmention target."""
|
2023-06-21 14:22:03 +00:00
|
|
|
# TODO: we have entities in prod that fail this, eg
|
|
|
|
# https://indieweb.social/users/bismark has source_protocol webmention
|
|
|
|
# assert obj.source_protocol in (cls.LABEL, cls.ABBREV, 'ui', None), str(obj)
|
2023-06-16 20:16:17 +00:00
|
|
|
|
|
|
|
if not util.is_web(obj.key.id()):
|
|
|
|
logger.warning(f"{obj.key} is source_protocol web but id isn't a URL!")
|
|
|
|
return None
|
|
|
|
|
|
|
|
return obj.key.id()
|
|
|
|
|
2023-03-20 04:08:24 +00:00
|
|
|
@classmethod
|
2023-06-21 03:59:32 +00:00
|
|
|
def send(cls, obj, url, **kwargs):
|
2023-03-23 03:49:28 +00:00
|
|
|
"""Sends a webmention to a given target URL.
|
|
|
|
|
|
|
|
See :meth:`Protocol.send` for details.
|
2023-06-04 22:11:52 +00:00
|
|
|
|
|
|
|
*Does not* propagate HTTP errors, DNS or connection failures, or other
|
|
|
|
exceptions, since webmention support is optional for web recipients.
|
|
|
|
https://fed.brid.gy/docs#error-handling
|
2023-03-23 03:49:28 +00:00
|
|
|
"""
|
2023-03-20 18:23:49 +00:00
|
|
|
source_url = obj.proxy_url()
|
|
|
|
logger.info(f'Sending webmention from {source_url} to {url}')
|
|
|
|
|
|
|
|
endpoint = common.webmention_discover(url).endpoint
|
2023-06-04 22:11:52 +00:00
|
|
|
try:
|
|
|
|
if endpoint:
|
|
|
|
webmention.send(endpoint, source_url, url)
|
|
|
|
return True
|
|
|
|
except RequestException as e:
|
|
|
|
# log exception, then ignore it
|
|
|
|
util.interpret_http_exception(e)
|
|
|
|
return False
|
2023-03-20 04:08:24 +00:00
|
|
|
|
|
|
|
    @classmethod
    def fetch(cls, obj, gateway=False, check_backlink=False, **kwargs):
        """Fetches a URL over HTTP and extracts its microformats2.

        Follows redirects, but doesn't change the original URL in obj's id! The
        :class:`Model` class doesn't allow that anyway, but more importantly, we
        want to preserve that original URL becase other objects may refer to it
        instead of the final redirect destination URL.

        See :meth:`Protocol.fetch` for other background.

        Args:
          gateway: passed through to :func:`webutil.util.fetch_mf2`
          check_backlink: bool, optional, whether to require a link to Bridgy
            Fed. Ignored if the URL is a homepage, ie has no path.
          kwargs: ignored
        """
        url = obj.key.id()
        is_homepage = urlparse(url).path.strip('/') == ''

        # homepages (profiles) don't need to link back to us
        require_backlink = (common.host_url().rstrip('/')
                            if check_backlink and not is_homepage
                            else None)

        try:
            parsed = util.fetch_mf2(url, gateway=gateway,
                                    require_backlink=require_backlink)
        except (ValueError, URLRequired) as e:
            error(str(e))

        if parsed is None:
            error(f'id {urlparse(url).fragment} not found in {url}')

        # find mf2 item
        if is_homepage:
            logger.info(f"{url} is user's web url")
            # homepages are profiles: look for the representative h-card
            entry = mf2util.representative_hcard(parsed, parsed['url'])
            logger.info(f'Representative h-card: {json_dumps(entry, indent=2)}')
            if not entry:
                msg = f"Couldn't find a representative h-card (http://microformats.org/wiki/representative-hcard-parsing) on {parsed['url']}"
                logging.info(msg)
                raise common.NoMicroformats(msg)
        else:
            # content pages: use the first h-entry
            entry = mf2util.find_first_entry(parsed, ['h-entry'])
            if not entry:
                error(f'No microformats2 found in {url}')

        # store final URL in mf2 object, and also default url property to it,
        # since that's the fallback for AS1/AS2 id
        if is_homepage:
            entry.setdefault('rel-urls', {}).update(parsed.get('rel-urls', {}))
            entry.setdefault('type', ['h-card'])
        props = entry.setdefault('properties', {})
        if parsed['url']:
            entry['url'] = parsed['url']
            props.setdefault('url', [parsed['url']])
        logger.info(f'Extracted microformats2 entry: {json_dumps(entry, indent=2)}')

        # run full authorship algorithm if necessary: https://indieweb.org/authorship
        # duplicated in microformats2.json_to_object
        author = util.get_first(props, 'author')
        if not isinstance(author, dict) and not is_homepage:
            logger.info(f'Fetching full authorship for author {author}')
            author = mf2util.find_author({'items': [entry]}, hentry=entry,
                                         fetch_mf2_func=util.fetch_mf2)
            logger.info(f'Got: {author}')
            if author:
                # normalize the resolved author into an h-card
                props['author'] = util.trim_nulls([{
                    "type": ["h-card"],
                    'properties': {
                        field: [author[field]] if author.get(field) else []
                        for field in ('name', 'photo', 'url')
                    },
                }])

        obj.mf2 = entry
        return obj
|
2023-03-20 04:08:24 +00:00
|
|
|
|
2023-05-24 04:30:57 +00:00
|
|
|
    @classmethod
    def serve(cls, obj):
        """Serves an :class:`Object` as HTML.

        Renders the object's AS1 as microformats2 HTML, hydrating its
        author/actor from the source protocol when only an id is present, and
        adds a meta refresh redirect back to the original page.
        """
        obj_as1 = obj.as1

        from_proto = PROTOCOLS.get(obj.source_protocol)
        if from_proto:
            # fill in author/actor if available
            for field in 'author', 'actor':
                val = as1.get_object(obj.as1, field)
                # only hydrate when the field is a bare {'id': ...} stub
                if val.keys() == set(['id']) and val['id']:
                    loaded = from_proto.load(val['id'])
                    if loaded and loaded.as1:
                        obj_as1 = {**obj_as1, field: loaded.as1}
        else:
            logger.debug(f'Not hydrating actor or author due to source_protocol {obj.source_protocol}')

        html = microformats2.activities_to_html([obj_as1])

        # add HTML meta redirect to source page. should trigger for end users in
        # browsers but not for webmention receivers (hopefully).
        url = util.get_url(obj_as1)
        if url:
            utf8 = '<meta charset="utf-8">'
            refresh = f'<meta http-equiv="refresh" content="0;url={url}">'
            html = html.replace(utf8, utf8 + '\n' + refresh)

        return html, {'Content-Type': common.CONTENT_TYPE_HTML}
|
|
|
|
|
2023-03-20 04:08:24 +00:00
|
|
|
|
2023-05-31 00:24:49 +00:00
|
|
|
@app.get('/web-site')
@flask_util.cached(cache, datetime.timedelta(days=1))
def enter_web_site():
    """Renders the 'enter your web site' form page (cached for a day)."""
    template = 'enter_web_site.html'
    return render_template(template)
|
|
|
|
|
|
|
|
|
|
|
|
@app.post('/web-site')
def check_web_site():
    """Handles the 'enter your web site' form: creates and verifies the user.

    Extracts the domain from the submitted URL, creates (or fetches) the
    corresponding :class:`Web` user, runs :meth:`Web.verify`, and redirects to
    the user's page. Re-renders the form with a flashed message on failure.
    """
    url = request.values['url']
    # this normalizes and lower cases domain
    domain = util.domain_from_link(url, minimize=False)
    if not domain:
        flash(f'No domain found in {url}')
        return render_template('enter_web_site.html')
    elif domain in common.DOMAINS:
        # don't let people sign up our own serving domains
        flash(f'{domain} is a Bridgy Fed domain')
        return render_template('enter_web_site.html')

    g.user = Web.get_or_create(domain, direct=True)
    try:
        # verify() may return a *different* user, eg root domain instead of www
        g.user = g.user.verify()
    except BaseException as e:
        # interpret_http_exception only returns a code for HTTP-ish failures;
        # anything else is unexpected and should propagate
        code, body = util.interpret_http_exception(e)
        if code:
            flash(f"Couldn't connect to {url}: {e}")
            return render_template('enter_web_site.html')
        raise

    g.user.put()
    return redirect(g.user.user_page_path())
|
|
|
|
|
|
|
|
|
2023-04-06 16:16:25 +00:00
|
|
|
@app.post('/webmention')
def webmention_external():
    """Handles inbound webmention, enqueue task to process.

    Use a task queue to deliver to followers because we send to each inbox in
    serial, which can take a long time with many followers/instances.
    """
    logger.info(f'Params: {list(request.form.items())}')

    source = flask_util.get_required_param('source').strip()
    if not util.is_web(source):
        error(f'Bad URL {source}')

    domain = util.domain_from_link(source, minimize=False)
    if not domain:
        error(f'Bad source URL {source}')

    # the source domain must belong to an existing Web user
    g.user = Web.get_by_id(domain)
    if not g.user:
        error(f'No user found for domain {domain}')

    # re-post the same form params to the internal task handler
    queue_path = tasks_client.queue_path(APP_ID, TASKS_LOCATION, 'webmention')
    task = tasks_client.create_task(
        parent=queue_path,
        task={
            'app_engine_http_request': {
                'http_method': 'POST',
                'relative_uri': '/_ah/queue/webmention',
                'body': urlencode(request.form).encode(),
                # https://googleapis.dev/python/cloudtasks/latest/gapic/v2/types.html#google.cloud.tasks_v2.types.AppEngineHttpRequest.headers
                'headers': {'Content-Type': 'application/x-www-form-urlencoded'},
            },
        },
    )
    msg = f'Enqueued task {task.name}.'
    logger.info(msg)
    # 202 Accepted: work will happen asynchronously
    return msg, 202
|
|
|
|
|
|
|
|
|
2023-04-17 00:37:02 +00:00
|
|
|
@app.post('/webmention-interactive')
def webmention_interactive():
    """Handler that runs interactive webmention-based requests from the web UI.

    ...eg the update profile button on user pages.
    """
    try:
        webmention_external()
        flash(f'Updating fediverse profile from <a href="{g.user.web_url()}">{g.user.key.id()}</a>...')
    except HTTPException as e:
        # surface the error message to the user instead of a raw error page
        flash(util.linkify(str(e.description), pretty=True))

    # g.user may be unset if webmention_external failed before loading it
    if g.user:
        path = g.user.user_page_path()
    else:
        path = '/'
    return redirect(path, code=302)
|
|
|
|
|
|
|
|
|
2023-04-06 16:16:25 +00:00
|
|
|
@app.post('/_ah/queue/webmention')
def webmention_task():
    """Handles inbound webmention task.

    Loads the source page, wraps it in a create/update/delete activity as
    appropriate, collects delivery targets, and sends to each target's
    protocol in serial, recording per-target delivery state on the activity
    :class:`Object`.
    """
    logger.info(f'Params: {list(request.form.items())}')

    # load user
    source = flask_util.get_required_param('source').strip()
    domain = util.domain_from_link(source, minimize=False)
    logger.info(f'webmention from {domain}')

    g.user = Web.get_by_id(domain)
    if not g.user:
        # 304 tells Cloud Tasks not to retry
        error(f'No user found for domain {domain}', status=304)

    # fetch source page
    try:
        obj = Web.load(source, remote=True, check_backlink=True)
    except BadRequest as e:
        error(str(e.description), status=304)
    except HTTPError as e:
        if e.response.status_code not in (410, 404):
            error(f'{e} ; {e.response.text if e.response else ""}', status=502)

        # 404/410: the post is gone. only send a Delete if we previously
        # published a successful Create for it.
        create_id = f'{source}#bridgy-fed-create'
        logger.info(f'Interpreting as Delete. Looking for {create_id}')
        create = Object.get_by_id(create_id)
        if not create or create.status != 'complete':
            error(f"Bridgy Fed hasn't successfully published {source}", status=304)

        id = f'{source}#bridgy-fed-delete'
        obj = Object(id=id, our_as1={
            'id': id,
            'objectType': 'activity',
            'verb': 'delete',
            'actor': g.user.ap_actor(),
            'object': source,
        })

    if not obj.mf2 and obj.type != 'delete':
        error(f'No microformats2 found in {source}', status=304)
    elif obj.mf2:
        # set actor to user
        props = obj.mf2['properties']
        author_urls = microformats2.get_string_urls(props.get('author', []))
        if author_urls and not g.user.is_web_url(author_urls[0]):
            # don't let pages claim an author other than the sending user
            logger.info(f'Overriding author {author_urls[0]} with {g.user.ap_actor()}')
            props['author'] = [g.user.ap_actor()]
    logger.info(f'Converted to AS1: {obj.type}: {json_dumps(obj.as1, indent=2)}')

    # if source is home page, update Web user and send an actor Update to
    # followers' instances
    if g.user.is_web_url(obj.key.id()):
        obj.put()
        g.user.obj = obj
        g.user.put()

        actor_as1 = {
            **obj.as1,
            'id': g.user.ap_actor(),
            'updated': util.now().isoformat(),
        }
        id = common.host_url(f'{obj.key.id()}#update-{util.now().isoformat()}')
        obj = Object(id=id, our_as1={
            'objectType': 'activity',
            'verb': 'update',
            'id': id,
            'actor': g.user.ap_actor(),
            'object': actor_as1,
        })

    targets = _targets(obj)  # maps Target to Object or None

    obj.populate(
        users=[g.user.key],
        source_protocol='web',
    )
    if not targets:
        obj.labels.append('user')
        obj.status = 'ignored'
        obj.put()
        return 'No targets', 204

    err = None
    # NOTE(review): last_success is assigned but never read below — confirm
    # whether it's vestigial
    last_success = None
    log_data = True

    if obj.type in ('note', 'article', 'comment'):
        # have we already seen this object? has it changed? or is it new?
        if obj.changed:
            logger.info(f'Content has changed from last time at {obj.updated}! Redelivering to all inboxes')
            updated = util.now().isoformat()
            id = f'{obj.key.id()}#bridgy-fed-update-{updated}'
            logger.info(f'Wrapping in update activity {id}')
            obj.put()
            update_as1 = {
                'objectType': 'activity',
                'verb': 'update',
                'id': id,
                'actor': g.user.ap_actor(),
                'object': {
                    # Mastodon requires the updated field for Updates, so
                    # add a default value.
                    # https://docs.joinmastodon.org/spec/activitypub/#supported-activities-for-statuses
                    # https://socialhub.activitypub.rocks/t/what-could-be-the-reason-that-my-update-activity-does-not-work/2893/4
                    # https://github.com/mastodon/documentation/pull/1150
                    'updated': updated,
                    **obj.as1,
                },
            }
            obj = Object(id=id, mf2=obj.mf2, our_as1=update_as1, labels=['user'],
                         users=[g.user.key], source_protocol='web')

        elif obj.new or 'force' in request.form:
            logger.info(f'New Object {obj.key.id()}')
            id = f'{obj.key.id()}#bridgy-fed-create'
            logger.info(f'Wrapping in post activity {id}')
            obj.put()
            create_as1 = {
                'objectType': 'activity',
                'verb': 'post',
                'id': id,
                'actor': g.user.ap_actor(),
                'object': obj.as1,
            }
            obj = Object(id=id, mf2=obj.mf2, our_as1=create_as1,
                         users=[g.user.key], labels=['user'],
                         source_protocol='web')

        else:
            msg = f'{obj.key.id()} is unchanged, nothing to do'
            logger.info(msg)
            return msg, 204

    sorted_targets = sorted(targets.items(), key=lambda t: t[0].uri)
    obj.populate(
        status='in progress',
        labels=['user'],
        delivered=[],
        failed=[],
        undelivered=[t for t, _ in sorted_targets],
    )
    logger.info(f'Delivering to: {obj.undelivered}')

    # make copy of undelivered because we modify it below.
    # sort targets so order is deterministic for tests.
    for target, orig_obj in sorted_targets:
        assert target.uri
        protocol = PROTOCOLS[target.protocol]

        if obj.type == 'follow':
            # should be guaranteed by _targets()
            assert orig_obj and orig_obj.as1
            to = protocol.get_or_create(id=orig_obj.key.id(), obj=orig_obj)
            Follower.get_or_create(to=to, from_=g.user, follow=obj.key)

        # this is reused later in ActivityPub.send()
        # TODO: find a better way
        obj.orig_obj = orig_obj
        try:
            sent = protocol.send(obj, target.uri, log_data=log_data)
            if sent:
                obj.delivered.append(target)
                obj.undelivered.remove(target)
        except BaseException as e:
            code, body = util.interpret_http_exception(e)
            if not code and not body:
                # not an HTTP-ish failure; treat as a programming error
                raise
            obj.failed.append(target)
            obj.undelivered.remove(target)
            err = e
        finally:
            # only log full request data for the first delivery
            log_data = False

        # checkpoint delivery progress after each target
        obj.put()

    obj.status = ('complete' if obj.delivered
                  else 'failed' if obj.failed
                  else 'ignored')
    obj.put()

    # Pass the response status code and body through as our response
    if obj.delivered:
        return 'OK', 200
    elif isinstance(err, BadGateway):
        raise err
    elif isinstance(err, HTTPError):
        return str(err), err.status_code
    elif obj.status == 'ignored':
        return 'Nothing to do', 204
    else:
        return str(err) if err else r'¯\_(ツ)_/¯'
|
2023-04-02 02:13:51 +00:00
|
|
|
|
2022-11-14 15:07:33 +00:00
|
|
|
|
2023-06-21 03:59:32 +00:00
|
|
|
def _targets(obj):
    """Collects the targets to send an :class:`models.Object` to.

    Args:
      obj: :class:`models.Object`

    Returns: dict: {
      :class:`Target`: original (in response to) :class:`Object`, if any,
        otherwise None
    }
    """
    logger.info('Finding recipients and their targets')

    # if there's in-reply-to, like-of, or repost-of, they're the targets.
    # otherwise, it's all followers' inboxes.
    # sort so order is deterministic for tests.
    orig_ids = sorted(as1.get_ids(obj.as1, 'inReplyTo'))
    verb = obj.as1.get('verb')
    if orig_ids:
        logger.info(f'original object ids from inReplyTo: {orig_ids}')
    elif verb in as1.VERBS_WITH_OBJECT:
        # prefer id or url, if available
        # https://github.com/snarfed/bridgy-fed/issues/307
        orig_ids = (as1.get_ids(obj.as1, 'object')
                    or util.get_urls(obj.as1, 'object'))
        if not orig_ids:
            error(f'{verb} missing target URL')
        logger.info(f'original object ids from object: {orig_ids}')

    orig_ids = sorted(common.remove_blocklisted(orig_ids))
    orig_obj = None
    targets = {}
    for id in orig_ids:
        protocol = Protocol.for_id(id)
        if not protocol:
            logger.info(f"Can't determine protocol for {id}")
            continue

        orig_obj = protocol.load(id)
        if not orig_obj or not orig_obj.as1:
            logger.info(f"Couldn't load {id}")
            continue

        target = protocol.target_for(orig_obj)
        if target:
            targets[Target(protocol=protocol.LABEL, uri=target)] = orig_obj
            logger.info(f'Target for {id} is {target}')
            continue

        # TODO: surface errors like this somehow?
        logger.error(f"Can't find delivery target for {id}")

    # no explicit targets (or a repost): deliver to all of the user's followers
    if not targets or verb == 'share':
        logger.info('Delivering to followers')
        followers = Follower.query(Follower.to == g.user.key,
                                   Follower.status == 'active'
                                   ).fetch()
        users = ndb.get_multi(f.from_ for f in followers)
        # drop followers whose user entity no longer exists
        users = [u for u in users if u]
        User.load_multi(users)

        for user in users:
            # TODO: should we pass remote=False through here to Protocol.load?
            target = user.target_for(user.obj, shared=True) if user.obj else None
            if not target:
                # TODO: surface errors like this somehow?
                logger.error(f'Follower {user.key} has no delivery target')
                continue

            # HACK: use last target object from above for reposts, which
            # has its resolved id
            obj = orig_obj if verb == 'share' else None
            targets[Target(protocol=user.LABEL, uri=target)] = obj

    return targets
|