2017-08-15 14:39:22 +00:00
|
|
|
"""Handles inbound webmentions.
|
2017-08-26 22:20:54 +00:00
|
|
|
|
2017-09-02 03:49:00 +00:00
|
|
|
TODO tests:
|
|
|
|
* actor/attributedTo could be string URL
|
|
|
|
* salmon rel via webfinger via author.name + domain
|
2017-08-15 14:39:22 +00:00
|
|
|
"""
|
|
|
|
import logging
|
2019-12-26 06:20:57 +00:00
|
|
|
import urllib.parse
|
2021-07-11 15:48:28 +00:00
|
|
|
from urllib.parse import urlencode
|
2017-08-15 14:39:22 +00:00
|
|
|
|
2017-08-23 15:14:51 +00:00
|
|
|
import django_salmon
|
2019-09-19 14:56:51 +00:00
|
|
|
from django_salmon import magicsigs
|
2017-08-23 15:14:51 +00:00
|
|
|
import feedparser
|
2021-07-11 15:48:28 +00:00
|
|
|
from flask import request
|
|
|
|
from flask.views import View
|
2019-12-26 06:20:57 +00:00
|
|
|
from google.cloud.ndb import Key
|
2022-07-16 04:09:18 +00:00
|
|
|
from granary import as1, as2, atom, microformats2
|
2017-08-15 14:39:22 +00:00
|
|
|
import mf2util
|
2021-07-18 04:22:13 +00:00
|
|
|
from oauth_dropins.webutil import flask_util, util
|
2021-08-06 17:29:25 +00:00
|
|
|
from oauth_dropins.webutil.flask_util import error
|
2019-12-25 07:26:58 +00:00
|
|
|
from oauth_dropins.webutil.util import json_dumps, json_loads
|
2017-08-15 14:42:29 +00:00
|
|
|
import requests
|
2021-08-16 18:47:31 +00:00
|
|
|
from werkzeug.exceptions import BadGateway
|
2017-08-15 14:39:22 +00:00
|
|
|
|
|
|
|
import activitypub
|
2021-07-11 15:48:28 +00:00
|
|
|
from app import app
|
2017-08-15 14:39:22 +00:00
|
|
|
import common
|
2022-11-24 16:20:04 +00:00
|
|
|
from models import Activity, Follower, User
|
2017-08-15 14:39:22 +00:00
|
|
|
|
2022-02-12 06:38:56 +00:00
|
|
|
logger = logging.getLogger(__name__)

# NOTE(review): not referenced anywhere in this file; presumably domains for
# which we skip sending notification emails — confirm at the call sites.
SKIP_EMAIL_DOMAINS = frozenset(('localhost', 'snarfed.org'))
|
|
|
|
|
2017-08-15 14:39:22 +00:00
|
|
|
|
2021-07-11 15:48:28 +00:00
|
|
|
class Webmention(View):
    """Handles inbound webmention, converts to ActivityPub or Salmon."""

    # Per-request state, populated by dispatch_request() and shared by the
    # try_activitypub / try_salmon delivery methods.
    source_url = None  # string; final source URL after redirects
    source_domain = None  # string; netloc of source_url, port stripped
    source_mf2 = None  # parsed mf2 dict
    source_obj = None  # parsed AS1 dict
    target_resp = None  # requests.Response
    user = None  # User
|
2017-08-15 14:39:22 +00:00
|
|
|
|
2021-07-11 15:48:28 +00:00
|
|
|
def dispatch_request(self):
|
2022-02-12 06:38:56 +00:00
|
|
|
logger.info(f'Params: {list(request.form.items())}')
|
2017-08-15 14:39:22 +00:00
|
|
|
|
2018-11-13 15:26:50 +00:00
|
|
|
# fetch source page
|
2021-07-18 04:22:13 +00:00
|
|
|
source = flask_util.get_required_param('source')
|
2022-11-29 01:48:33 +00:00
|
|
|
logger.info(f'webmention from {util.domain_from_link(source, minimize=False)}')
|
2022-11-24 16:20:04 +00:00
|
|
|
try:
|
|
|
|
source_resp = util.requests_get(source)
|
|
|
|
except ValueError as e:
|
|
|
|
error(f'Bad source URL: {source}: {e}')
|
2018-11-13 15:26:50 +00:00
|
|
|
self.source_url = source_resp.url or source
|
2019-12-26 06:20:57 +00:00
|
|
|
self.source_domain = urllib.parse.urlparse(self.source_url).netloc.split(':')[0]
|
2022-11-18 10:18:35 +00:00
|
|
|
fragment = urllib.parse.urlparse(self.source_url).fragment
|
|
|
|
self.source_mf2 = util.parse_mf2(source_resp, id=fragment)
|
2019-10-04 04:08:26 +00:00
|
|
|
|
2022-11-18 23:28:34 +00:00
|
|
|
if id and self.source_mf2 is None:
|
|
|
|
error(f'id {fragment} not found in {self.source_url}')
|
|
|
|
|
2022-02-12 06:38:56 +00:00
|
|
|
# logger.debug(f'Parsed mf2 for {source_resp.url} : {json_dumps(self.source_mf2 indent=2)}')
|
2018-11-13 15:26:50 +00:00
|
|
|
|
2018-11-27 15:27:00 +00:00
|
|
|
# check for backlink to bridgy fed (for webmention spec and to confirm
|
|
|
|
# source's intent to federate to mastodon)
|
2022-11-24 17:38:30 +00:00
|
|
|
for domain in common.DOMAINS:
|
|
|
|
if domain in source_resp.text:
|
|
|
|
break
|
|
|
|
else:
|
|
|
|
error(f"Couldn't find link to {request.host_url.rstrip('/')}")
|
2018-11-27 15:27:00 +00:00
|
|
|
|
2018-11-13 15:26:50 +00:00
|
|
|
# convert source page to ActivityStreams
|
|
|
|
entry = mf2util.find_first_entry(self.source_mf2, ['h-entry'])
|
|
|
|
if not entry:
|
2021-11-01 23:14:36 +00:00
|
|
|
error(f'No microformats2 found on {self.source_url}')
|
2018-11-13 15:26:50 +00:00
|
|
|
|
2022-11-18 10:18:35 +00:00
|
|
|
logger.info(f'First entry (id={fragment}: {json_dumps(entry, indent=2)}')
|
2018-11-13 15:26:50 +00:00
|
|
|
# make sure it has url, since we use that for AS2 id, which is required
|
|
|
|
# for ActivityPub.
|
|
|
|
props = entry.setdefault('properties', {})
|
|
|
|
if not props.get('url'):
|
|
|
|
props['url'] = [self.source_url]
|
|
|
|
|
|
|
|
self.source_obj = microformats2.json_to_object(entry, fetch_mf2=True)
|
2022-11-29 01:48:33 +00:00
|
|
|
type_label = ' '.join((
|
|
|
|
self.source_obj.get('verb', ''),
|
|
|
|
self.source_obj.get('objectType'), '',
|
|
|
|
self.source_obj.get('object', {}).get('objectType', ''),
|
|
|
|
))
|
2022-11-30 06:08:01 +00:00
|
|
|
logger.info(f'Converted webmention to AS1: {type_label}: {json_dumps(self.source_obj, indent=2)}')
|
2018-11-13 15:26:50 +00:00
|
|
|
|
2022-11-24 16:20:04 +00:00
|
|
|
self.user = User.get_or_create(self.source_domain)
|
2021-07-11 15:48:28 +00:00
|
|
|
for method in self.try_activitypub, self.try_salmon:
|
|
|
|
ret = method()
|
|
|
|
if ret:
|
|
|
|
return ret
|
|
|
|
|
|
|
|
return ''
|
2018-11-13 15:26:50 +00:00
|
|
|
|
2017-10-26 14:30:52 +00:00
|
|
|
    def try_activitypub(self):
        """Attempts ActivityPub delivery.

        Returns Flask response (string body or tuple) if we succeeded or failed,
        None if ActivityPub was not available.
        """
        targets = self._activitypub_targets()
        if not targets:
            return None

        # NOTE(review): this local shadows the flask_util.error() helper
        # imported at module level; fine inside this method since error() is
        # never called here, but easy to trip over.
        error = None
        last_success = None

        # TODO: collect by inbox, add 'to' fields, de-dupe inboxes and recipients

        for activity, inbox in targets:
            # target_as2 is set by _activitypub_targets() for reply/like/etc
            # targets; it's absent for follower-inbox deliveries.
            target_obj = json_loads(activity.target_as2) if activity.target_as2 else None

            source_activity = common.postprocess_as2(
                as2.from_as1(self.source_obj), target=target_obj, user=self.user)

            # default the actor to our proxy URL for the source domain
            if not source_activity.get('actor'):
                source_activity['actor'] = f'{request.host_url}{self.source_domain}'

            if activity.status == 'complete':
                # this activity was already delivered once; skip if unchanged,
                # otherwise re-send as an Update.
                if activity.source_mf2:
                    def content(mf2):
                        # extract the first item's content property, if any
                        items = mf2.get('items')
                        if items:
                            return microformats2.first_props(
                                items[0].get('properties')
                            ).get('content')

                    orig_content = content(json_loads(activity.source_mf2))
                    new_content = content(self.source_mf2)
                    if orig_content and new_content and orig_content == new_content:
                        logger.info(f'Skipping; new content is same as content published before at {activity.updated}')
                        continue

                if source_activity.get('type') == 'Create':
                    source_activity['type'] = 'Update'

            if self.source_obj.get('verb') == 'follow':
                # prefer AS2 id or url, if available
                # https://github.com/snarfed/bridgy-fed/issues/307
                dest = ((target_obj.get('id') or util.get_first(target_obj, 'url'))
                        if target_obj else util.get_url(self.source_obj, 'object'))
                # record the follow so we can deliver this user's future posts
                Follower.get_or_create(dest=dest, src=self.source_domain,
                                       last_follow=json_dumps(self.source_obj))

            try:
                last = common.signed_post(inbox, data=source_activity, user=self.user)
                activity.status = 'complete'
                last_success = last
            # NOTE(review): BaseException is very broad (includes
            # KeyboardInterrupt/SystemExit); presumably intentional so the
            # Activity status is always recorded — confirm.
            except BaseException as e:
                error = e
                activity.status = 'error'

            activity.put()

        # Pass the AP response status code and body through as our response
        if last_success:
            return last_success.text or 'Sent!', last_success.status_code
        elif isinstance(error, BadGateway):
            raise error
        elif isinstance(error, requests.HTTPError):
            # NOTE(review): stock requests.HTTPError has no status_code
            # attribute (it's on error.response) — confirm common.signed_post
            # raises a subclass that sets it, or this line will AttributeError.
            return str(error), error.status_code
        else:
            return str(error)
|
2018-11-13 15:26:50 +00:00
|
|
|
|
2020-06-06 15:39:44 +00:00
|
|
|
def _targets(self):
|
2018-11-19 00:58:52 +00:00
|
|
|
"""
|
2020-06-06 15:39:44 +00:00
|
|
|
Returns: list of string URLs, the source's inReplyTos or objects
|
|
|
|
(if appropriate)
|
2018-11-19 00:58:52 +00:00
|
|
|
"""
|
2020-06-06 15:39:44 +00:00
|
|
|
targets = util.get_urls(self.source_obj, 'inReplyTo')
|
|
|
|
if targets:
|
2022-11-14 15:07:33 +00:00
|
|
|
logger.info(f'targets from inReplyTo: {targets}')
|
2020-06-06 15:39:44 +00:00
|
|
|
return targets
|
2018-11-19 00:58:52 +00:00
|
|
|
|
2022-07-16 04:09:18 +00:00
|
|
|
if self.source_obj.get('verb') in as1.VERBS_WITH_OBJECT:
|
2022-11-14 15:07:33 +00:00
|
|
|
targets = util.get_urls(self.source_obj, 'object')
|
|
|
|
logger.info(f'targets from object: {targets}')
|
|
|
|
return targets
|
2018-11-13 15:26:50 +00:00
|
|
|
|
|
|
|
    def _activitypub_targets(self):
        """Collects ActivityPub delivery targets and their inboxes.

        Returns: list of (Activity, string inbox URL)
        """
        # if there's in-reply-to, like-of, or repost-of, they're the targets.
        # otherwise, it's all followers' inboxes.
        targets = self._targets()

        if not targets:
            # interpret this as a Create or Update, deliver it to followers
            inboxes = set()
            # key-range scan for followers whose key starts with
            # '<source_domain> ' (space and the next character bound the range)
            for follower in Follower.query().filter(
                Follower.key > Key('Follower', self.source_domain + ' '),
                Follower.key < Key('Follower', self.source_domain + chr(ord(' ') + 1))):
                if follower.status != 'inactive' and follower.last_follow:
                    actor = json_loads(follower.last_follow).get('actor')
                    if actor and isinstance(actor, dict):
                        # prefer the shared inbox to cut duplicate deliveries
                        inboxes.add(actor.get('endpoints', {}).get('sharedInbox') or
                                    actor.get('publicInbox') or
                                    actor.get('inbox'))
            # one Activity per inbox; sorted for deterministic order, falsy
            # (missing) inboxes dropped
            inboxes = [(Activity.get_or_create(
                source=self.source_url, target=inbox,
                domain=[self.source_domain], direction='out',
                protocol='activitypub', source_mf2=json_dumps(self.source_mf2)),
                inbox) for inbox in sorted(inboxes) if inbox]
            logger.info(f"Delivering to followers' inboxes: {[i for _, i in inboxes]}")
            return inboxes

        targets = common.remove_blocklisted(targets)
        if not targets:
            error(f"Silo responses are not yet supported.")

        activities_and_inbox_urls = []
        for target in targets:
            # fetch target page as AS2 object
            try:
                self.target_resp = common.get_as2(target)
            except (requests.HTTPError, BadGateway) as e:
                self.target_resp = getattr(e, 'requests_response', None)
                if self.target_resp and self.target_resp.status_code // 100 == 2:
                    content_type = common.content_type(self.target_resp) or ''
                    if content_type.startswith('text/html'):
                        # target is plain HTML, not AS2; leave it for Salmon
                        # TODO: pass e.requests_response to try_salmon's target_resp
                        continue  # give up
                # anything else is a real fetch failure; propagate it
                raise
            target_url = self.target_resp.url or target

            activity = Activity.get_or_create(
                source=self.source_url, target=target_url, domain=[self.source_domain],
                direction='out', protocol='activitypub',
                source_mf2=json_dumps(self.source_mf2))

            # find target's inbox
            target_obj = self.target_resp.json()
            activity.target_as2 = json_dumps(target_obj)
            inbox_url = target_obj.get('inbox')

            if not inbox_url:
                # TODO: test actor/attributedTo and not, with/without inbox
                actor = (util.get_first(target_obj, 'actor') or
                         util.get_first(target_obj, 'attributedTo'))
                if isinstance(actor, dict):
                    inbox_url = actor.get('inbox')
                    actor = util.get_first(actor, 'url') or actor.get('id')
                if not inbox_url and not actor:
                    error('Target object has no actor or attributedTo with URL or id.')
                elif not isinstance(actor, str):
                    error(f'Target actor or attributedTo has unexpected url or id object: {actor}')

            if not inbox_url:
                # fetch actor as AS object to find its inbox
                actor = common.get_as2(actor).json()
                inbox_url = actor.get('inbox')

            if not inbox_url:
                # TODO: probably need a way to save errors like this so that we can
                # return them if ostatus fails too.
                # error('Target actor has no inbox')
                continue

            # inbox may be relative to the target page
            inbox_url = urllib.parse.urljoin(target_url, inbox_url)
            activities_and_inbox_urls.append((activity, inbox_url))

        logger.info(f"Delivering to targets' inboxes: {[i for _, i in activities_and_inbox_urls]}")
        return activities_and_inbox_urls
|
2017-08-23 15:14:51 +00:00
|
|
|
|
2018-11-19 00:58:52 +00:00
|
|
|
    def try_salmon(self):
        """Attempts OStatus (Salmon) delivery.

        Returns Flask response (string body or tuple) if we attempted OStatus
        delivery (whether successful or not), None if we didn't attempt, raises
        an exception otherwise.
        """
        # reuse the target page already fetched by the ActivityPub attempt,
        # if there was one; otherwise pick the first discovered target
        target = None
        if self.target_resp:
            target = self.target_resp.url
        else:
            targets = self._targets()
            if targets:
                target = targets[0]
        if not target:
            logger.warning("No targets or followers. Ignoring.")
            return

        status = None
        try:
            ret = self._try_salmon(target)
            # _try_salmon returns a string ('Sent!') on success
            if isinstance(ret, str):
                status = 'complete'
            return ret
        except:
            # bare except is OK here: we only record status, then re-raise
            status = 'error'
            raise
        finally:
            # record the attempt regardless of outcome (status stays None if
            # _try_salmon returned a non-string, in which case we skip)
            if status:
                Activity(source=self.source_url, target=target, status=status,
                         domain=[self.source_domain], direction='out',
                         protocol='ostatus',
                         source_mf2=json_dumps(self.source_mf2)).put()
|
2018-11-13 15:26:50 +00:00
|
|
|
|
2021-07-20 22:55:16 +00:00
|
|
|
    def _try_salmon(self, target):
        """Does the actual work of OStatus/Salmon delivery to one target.

        Args:
          target: string, URL of the target post

        Returns:
          string 'Sent!' on success; may abort via error() on failure
        """
        # fetch target HTML page, extract Atom rel-alternate link
        if not self.target_resp:
            self.target_resp = util.requests_get(target)

        parsed = util.parse_html(self.target_resp)
        atom_url = parsed.find('link', rel='alternate', type=common.CONTENT_TYPE_ATOM)
        if not atom_url or not atom_url.get('href'):
            error(f'Target post {target} has no Atom link')

        # fetch Atom target post, extract and inject id into source object
        base_url = ''
        base = parsed.find('base')
        if base and base.get('href'):
            base_url = base['href']
        # NOTE(review): duplicate of the find() above; atom_link == atom_url
        atom_link = parsed.find('link', rel='alternate', type=common.CONTENT_TYPE_ATOM)
        # resolve the Atom href against <base>, then against the target URL
        atom_url = urllib.parse.urljoin(
            target, urllib.parse.urljoin(base_url, atom_link['href']))

        feed = util.requests_get(atom_url).text
        parsed = feedparser.parse(feed)
        # NOTE(review): will IndexError if the feed has no entries — confirm
        # upstream guarantees at least one.
        entry = parsed.entries[0]
        logger.info(f'Parsed: {json_dumps(entry, indent=2)}')
        target_id = entry.id
        in_reply_to = self.source_obj.get('inReplyTo')
        source_obj_obj = self.source_obj.get('object')
        if in_reply_to:
            # inject the target's Atom id into the matching inReplyTo elem
            for elem in in_reply_to:
                if elem.get('url') == target:
                    elem['id'] = target_id
        elif isinstance(source_obj_obj, dict):
            source_obj_obj['id'] = target_id

        # Mastodon (and maybe others?) require a rel-mentioned link to the
        # original post's author to make it show up as a reply:
        # app/services/process_interaction_service.rb
        # ...so add them as a tag, which atom renders as a rel-mention link.
        authors = entry.get('authors', None)
        if authors:
            url = entry.authors[0].get('href')
            if url:
                self.source_obj.setdefault('tags', []).append({'url': url})

        # extract and discover salmon endpoint
        logger.info(f'Discovering Salmon endpoint in {atom_url}')
        endpoint = django_salmon.discover_salmon_endpoint(feed)

        if not endpoint:
            # try webfinger
            parsed = urllib.parse.urlparse(target)
            # TODO: test missing email
            author = entry.get('author_detail', {})
            # fall back to guessing acct as author name @ target host
            email = author.get('email') or '@'.join(
                (author.get('name', ''), parsed.netloc))
            try:
                # TODO: always https?
                profile = util.requests_get(
                    '%s://%s/.well-known/webfinger?resource=acct:%s' %
                    (parsed.scheme, parsed.netloc, email))
                endpoint = django_salmon.get_salmon_replies_link(profile.json())
            except ValueError:
                logging.warning("Couldn't parse response as JSON")
            except requests.HTTPError:
                # webfinger is best-effort; fall through to the check below
                pass

        if not endpoint:
            error('No salmon endpoint found!')
        logger.info(f'Discovered Salmon endpoint {endpoint}')

        # construct reply Atom object
        activity = self.source_obj
        # wrap bare objects so activity_to_atom gets an activity
        if self.source_obj.get('verb') not in as1.VERBS_WITH_OBJECT:
            activity = {'object': self.source_obj}
        entry = atom.activity_to_atom(activity, xml_base=self.source_url)
        logger.info(f'Converted {self.source_url} to Atom:\n{entry}')

        # sign reply and wrap in magic envelope
        # NOTE(review): `domain` is unused below — dead assignment?
        domain = urllib.parse.urlparse(self.source_url).netloc
        magic_envelope = magicsigs.magic_envelope(
            entry, common.CONTENT_TYPE_ATOM, self.user).decode()

        logger.info(f'Sending Salmon slap to {endpoint}')
        util.requests_post(
            endpoint, data=common.XML_UTF8 + magic_envelope,
            headers={'Content-Type': common.CONTENT_TYPE_MAGIC_ENVELOPE})

        return 'Sent!'
|
2017-08-15 14:39:22 +00:00
|
|
|
|
|
|
|
|
2021-07-11 15:48:28 +00:00
|
|
|
# Register the endpoint. POST only, per the webmention protocol.
app.add_url_rule('/webmention', view_func=Webmention.as_view('webmention'),
                 methods=['POST'])
|