"""Handles inbound webmentions.

TODO tests:

* actor/attributedTo could be string URL
* salmon rel via webfinger via author.name + domain
"""
|
|
|
|
import logging
|
2019-12-26 06:20:57 +00:00
|
|
|
import urllib.parse
|
2021-07-11 15:48:28 +00:00
|
|
|
from urllib.parse import urlencode
|
2017-08-15 14:39:22 +00:00
|
|
|
|
2017-08-23 15:14:51 +00:00
|
|
|
import django_salmon
|
2019-09-19 14:56:51 +00:00
|
|
|
from django_salmon import magicsigs
|
2017-08-23 15:14:51 +00:00
|
|
|
import feedparser
|
2021-07-11 15:48:28 +00:00
|
|
|
from flask import request
|
|
|
|
from flask.views import View
|
2019-12-26 06:20:57 +00:00
|
|
|
from google.cloud.ndb import Key
|
2017-10-18 02:39:00 +00:00
|
|
|
from granary import as2, atom, microformats2, source
|
2017-08-15 14:39:22 +00:00
|
|
|
import mf2util
|
|
|
|
from oauth_dropins.webutil import util
|
2019-12-25 07:26:58 +00:00
|
|
|
from oauth_dropins.webutil.util import json_dumps, json_loads
|
2017-08-15 14:42:29 +00:00
|
|
|
import requests
|
2017-08-15 14:39:22 +00:00
|
|
|
import webapp2
|
2017-10-20 14:49:25 +00:00
|
|
|
from webob import exc
|
2017-08-15 14:39:22 +00:00
|
|
|
|
|
|
|
import activitypub
|
2021-07-11 15:48:28 +00:00
|
|
|
from app import app
|
2017-08-15 14:39:22 +00:00
|
|
|
import common
|
2021-07-11 15:48:28 +00:00
|
|
|
from common import error
|
2018-11-13 15:26:50 +00:00
|
|
|
from models import Follower, MagicKey, Response
|
2017-08-15 14:39:22 +00:00
|
|
|
|
# NOTE(review): not referenced anywhere in this chunk; the name suggests domains
# to skip when sending email notifications — confirm against the rest of the file.
SKIP_EMAIL_DOMAINS = frozenset(('localhost', 'snarfed.org'))
2017-08-15 14:39:22 +00:00
|
|
|
|
class Webmention(View):
    """Handles inbound webmention, converts to ActivityPub or Salmon."""
    source_url = None     # string
    source_domain = None  # string
    source_mf2 = None     # parsed mf2 dict
    source_obj = None     # parsed AS1 dict
    target_resp = None    # requests.Response

    def dispatch_request(self):
        """Handles an inbound webmention POST.

        Fetches and parses the source page, converts it to AS1, then attempts
        ActivityPub delivery followed by Salmon (OStatus) delivery.

        Returns: Flask response (string body or (body, status) tuple)
        """
        logging.info(f'Params: {list(request.form.items())}')

        # fetch source page
        source = common.get_required_param('source')
        source_resp = common.requests_get(source)
        self.source_url = source_resp.url or source
        self.source_domain = urllib.parse.urlparse(self.source_url).netloc.split(':')[0]
        self.source_mf2 = util.parse_mf2(source_resp)

        # logging.debug('Parsed mf2 for %s: %s', source_resp.url, json_dumps(self.source_mf2, indent=2))

        # check for backlink to bridgy fed (for webmention spec and to confirm
        # source's intent to federate to mastodon)
        if (request.host_url not in source_resp.text and
                urllib.parse.quote(request.host_url, safe='') not in source_resp.text):
            # BUG FIX: was a plain string; the f prefix was missing, so the
            # literal '{request.host_url}' was returned to the user.
            return error(f"Couldn't find link to {request.host_url}")

        # convert source page to ActivityStreams
        entry = mf2util.find_first_entry(self.source_mf2, ['h-entry'])
        if not entry:
            # BUG FIX: missing f prefix, as above.
            return error(f'No microformats2 found on {self.source_url}')

        logging.info(f'First entry: {json_dumps(entry, indent=2)}')
        # make sure it has url, since we use that for AS2 id, which is required
        # for ActivityPub.
        props = entry.setdefault('properties', {})
        if not props.get('url'):
            props['url'] = [self.source_url]

        self.source_obj = microformats2.json_to_object(entry, fetch_mf2=True)
        logging.info(f'Converted to AS1: {json_dumps(self.source_obj, indent=2)}')

        # try protocols in order of preference; first one that attempts
        # delivery wins.
        for method in self.try_activitypub, self.try_salmon:
            ret = method()
            if ret:
                return ret

        return ''

    def try_activitypub(self):
        """Attempts ActivityPub delivery.

        Returns Flask response (string body or tuple) if we succeeded or failed,
        None if ActivityPub was not available.
        """
        targets = self._activitypub_targets()
        if not targets:
            return None

        key = MagicKey.get_or_create(self.source_domain)
        # renamed from `error`: the old local shadowed the imported
        # common.error() helper inside this method.
        last_error = None
        last_success = None

        # TODO: collect by inbox, add 'to' fields, de-dupe inboxes and recipients
        for resp, inbox in targets:
            target_obj = json_loads(resp.target_as2) if resp.target_as2 else None
            source_activity = common.postprocess_as2(
                as2.from_as1(self.source_obj), target=target_obj, key=key)

            # if we've already delivered this response once, re-deliver as an
            # Update instead of a Create
            if resp.status == 'complete':
                source_activity['type'] = 'Update'

            try:
                last = activitypub.send(source_activity, inbox, self.source_domain)
                resp.status = 'complete'
                last_success = last
            except BaseException as e:
                last_error = e
                resp.status = 'error'

            resp.put()

        # Pass the AP response status code and body through as our response
        if last_success:
            return last_success.text, last_success.status_code
        elif isinstance(last_error, (requests.HTTPError, exc.HTTPBadGateway)):
            # NOTE(review): requests.HTTPError exposes status via
            # e.response.status_code, not e.status_code — preserved as-is from
            # the original; confirm intended behavior before changing.
            return str(last_error), last_error.status_code
        else:
            return str(last_error)

    def _targets(self):
        """
        Returns: list of string URLs, the source's inReplyTos or objects
        (if appropriate), or None if neither applies
        """
        targets = util.get_urls(self.source_obj, 'inReplyTo')
        if targets:
            return targets

        if self.source_obj.get('verb') in source.VERBS_WITH_OBJECT:
            return util.get_urls(self.source_obj, 'object')

    def _activitypub_targets(self):
        """
        Returns: list of (Response, string inbox URL)
        """
        # if there's in-reply-to, like-of, or repost-of, they're the targets.
        # otherwise, it's all followers' inboxes.
        targets = self._targets()

        if not targets:
            # interpret this as a Create or Update, deliver it to followers
            inboxes = set()
            # key-range scan selects followers whose key starts with
            # '<source_domain> ' (space is the separator character)
            for follower in Follower.query().filter(
                    Follower.key > Key('Follower', self.source_domain + ' '),
                    Follower.key < Key('Follower', self.source_domain + chr(ord(' ') + 1))):
                if follower.status != 'inactive' and follower.last_follow:
                    actor = json_loads(follower.last_follow).get('actor')
                    if actor and isinstance(actor, dict):
                        # prefer sharedInbox to cut down on duplicate deliveries
                        inboxes.add(actor.get('endpoints', {}).get('sharedInbox') or
                                    actor.get('publicInbox') or
                                    actor.get('inbox'))
            return [(Response.get_or_create(
                        source=self.source_url, target=inbox, direction='out',
                        protocol='activitypub', source_mf2=json_dumps(self.source_mf2)),
                     inbox)
                    for inbox in sorted(inboxes) if inbox]

        resps_and_inbox_urls = []
        for target in targets:
            # fetch target page as AS2 object
            try:
                self.target_resp = common.get_as2(target)
            except (requests.HTTPError, exc.HTTPBadGateway) as e:
                self.target_resp = getattr(e, 'response', None)
                if self.target_resp and self.target_resp.status_code // 100 == 2:
                    content_type = common.content_type(self.target_resp) or ''
                    if content_type.startswith('text/html'):
                        # TODO: pass e.response to try_salmon()'s target_resp
                        continue  # give up
                raise
            target_url = self.target_resp.url or target

            resp = Response.get_or_create(
                source=self.source_url, target=target_url, direction='out',
                protocol='activitypub', source_mf2=json_dumps(self.source_mf2))

            # find target's inbox
            target_obj = self.target_resp.json()
            resp.target_as2 = json_dumps(target_obj)
            inbox_url = target_obj.get('inbox')

            if not inbox_url:
                # TODO: test actor/attributedTo and not, with/without inbox
                actor = (util.get_first(target_obj, 'actor') or
                         util.get_first(target_obj, 'attributedTo'))
                if isinstance(actor, dict):
                    inbox_url = actor.get('inbox')
                    actor = actor.get('url') or actor.get('id')
                if not inbox_url and not actor:
                    return error('Target object has no actor or attributedTo with URL or id.')
                elif not isinstance(actor, str):
                    return error(f'Target actor or attributedTo has unexpected url or id object: {actor}')

            if not inbox_url:
                # fetch actor as AS object
                actor = common.get_as2(actor).json()
                inbox_url = actor.get('inbox')

            if not inbox_url:
                # TODO: probably need a way to save errors like this so that we can
                # return them if ostatus fails too.
                # return error('Target actor has no inbox')
                continue

            inbox_url = urllib.parse.urljoin(target_url, inbox_url)
            resps_and_inbox_urls.append((resp, inbox_url))

        return resps_and_inbox_urls

    def try_salmon(self):
        """
        Returns Flask response (string body or tuple) if we attempted OStatus
        delivery (whether successful or not), None if we didn't attempt, raises
        an exception otherwise.
        """
        target = None
        if self.target_resp:
            target = self.target_resp.url
        else:
            targets = self._targets()
            if targets:
                target = targets[0]
        if not target:
            logging.warning("No targets or followers. Ignoring.")
            return

        resp = Response.get_or_create(
            source=self.source_url, target=target, direction='out',
            source_mf2=json_dumps(self.source_mf2))
        resp.protocol = 'ostatus'

        try:
            ret = self._try_salmon(resp)
            resp.status = 'complete'
            return ret
        except:
            resp.status = 'error'
            raise
        finally:
            resp.put()

    def _try_salmon(self, resp):
        """Discovers the target's Salmon endpoint and sends it a signed slap.

        Args:
          resp: Response
        """
        # fetch target HTML page, extract Atom rel-alternate link
        target = resp.target()
        if not self.target_resp:
            self.target_resp = common.requests_get(target)

        parsed = util.parse_html(self.target_resp)
        atom_link = parsed.find('link', rel='alternate', type=common.CONTENT_TYPE_ATOM)
        if not atom_link or not atom_link.get('href'):
            return error(f'Target post {resp.target()} has no Atom link')

        # fetch Atom target post, extract and inject id into source object.
        # resolve the Atom href against <base href> (if any), then against the
        # target URL itself.
        base_url = ''
        base = parsed.find('base')
        if base and base.get('href'):
            base_url = base['href']
        atom_url = urllib.parse.urljoin(
            resp.target(), urllib.parse.urljoin(base_url, atom_link['href']))

        feed = common.requests_get(atom_url).text
        parsed = feedparser.parse(feed)
        logging.info(f'Parsed: {json_dumps(parsed, indent=2)}')
        entry = parsed.entries[0]
        target_id = entry.id
        in_reply_to = self.source_obj.get('inReplyTo')
        source_obj_obj = self.source_obj.get('object')
        if in_reply_to:
            for elem in in_reply_to:
                if elem.get('url') == target:
                    elem['id'] = target_id
        elif isinstance(source_obj_obj, dict):
            source_obj_obj['id'] = target_id

        # Mastodon (and maybe others?) require a rel-mentioned link to the
        # original post's author to make it show up as a reply:
        # app/services/process_interaction_service.rb
        # ...so add them as a tag, which atom renders as a rel-mention link.
        authors = entry.get('authors', None)
        if authors:
            url = entry.authors[0].get('href')
            if url:
                self.source_obj.setdefault('tags', []).append({'url': url})

        # extract and discover salmon endpoint
        logging.info(f'Discovering Salmon endpoint in {atom_url}')
        endpoint = django_salmon.discover_salmon_endpoint(feed)

        if not endpoint:
            # try webfinger
            parsed = urllib.parse.urlparse(resp.target())
            # TODO: test missing email
            author = entry.get('author_detail', {})
            email = author.get('email') or '@'.join(
                (author.get('name', ''), parsed.netloc))
            try:
                # TODO: always https?
                profile = common.requests_get(
                    '%s://%s/.well-known/webfinger?resource=acct:%s' %
                    (parsed.scheme, parsed.netloc, email), parse_json=True)
                endpoint = django_salmon.get_salmon_replies_link(profile)
            except requests.HTTPError:
                # best-effort discovery; fall through to the error below
                pass

        if not endpoint:
            return error('No salmon endpoint found!')
        logging.info(f'Discovered Salmon endpoint {endpoint}')

        # construct reply Atom object
        self.source_url = resp.source()
        activity = self.source_obj
        if self.source_obj.get('verb') not in source.VERBS_WITH_OBJECT:
            activity = {'object': self.source_obj}
        entry = atom.activity_to_atom(activity, xml_base=self.source_url)
        logging.info(f'Converted {self.source_url} to Atom:\n{entry}')

        # sign reply and wrap in magic envelope
        domain = urllib.parse.urlparse(self.source_url).netloc
        key = MagicKey.get_or_create(domain)
        logging.info(f'Using key for {domain}: {key}')
        magic_envelope = magicsigs.magic_envelope(
            entry, common.CONTENT_TYPE_ATOM, key).decode()

        logging.info(f'Sending Salmon slap to {endpoint}')
        common.requests_post(
            endpoint, data=common.XML_UTF8 + magic_envelope,
            headers={'Content-Type': common.CONTENT_TYPE_MAGIC_ENVELOPE})

        return ''
|
2017-08-15 14:39:22 +00:00
|
|
|
|
|
|
|
|
# Module-level side effect: registers the inbound webmention endpoint.
# Webmention notifications arrive as POSTs per the webmention spec.
app.add_url_rule('/webmention', view_func=Webmention.as_view('webmention'),
                 methods=['POST'])
|