2017-09-12 14:31:50 +00:00
|
|
|
# coding=utf-8
|
2017-08-15 06:07:24 +00:00
|
|
|
"""Misc common utilities.
|
|
|
|
"""
|
2017-11-05 23:50:23 +00:00
|
|
|
import itertools
|
2017-08-15 06:07:24 +00:00
|
|
|
import logging
|
2021-07-11 23:30:14 +00:00
|
|
|
import os
|
2017-09-13 14:48:32 +00:00
|
|
|
import re
|
2019-12-26 06:20:57 +00:00
|
|
|
import urllib.parse
|
2017-08-15 06:07:24 +00:00
|
|
|
|
2021-07-18 04:22:13 +00:00
|
|
|
from flask import request
|
2017-09-28 14:25:21 +00:00
|
|
|
from granary import as2
|
2021-07-10 15:53:37 +00:00
|
|
|
from oauth_dropins.webutil import util, webmention
|
2021-08-16 18:47:31 +00:00
|
|
|
from oauth_dropins.webutil.flask_util import error
|
2017-08-15 06:07:24 +00:00
|
|
|
import requests
|
2021-08-16 18:47:31 +00:00
|
|
|
from werkzeug.exceptions import BadGateway
|
2017-08-15 14:39:22 +00:00
|
|
|
|
2017-10-16 14:13:43 +00:00
|
|
|
from models import Response
|
|
|
|
|
2019-12-26 06:20:57 +00:00
|
|
|
# Regexp for a bare domain (no scheme, no path), e.g. 'example.com'.
DOMAIN_RE = r'([^/:]+\.[^/:]+)'
# Regexp for a webfinger-style address, e.g. 'acct:user@example.com'.
ACCT_RE = r'(?:acct:)?([^@]+)@' + DOMAIN_RE
# TLDs we refuse to treat as fediverse domains; these are almost always
# file extensions mistaken for hostnames.
TLD_BLOCKLIST = ('7z', 'asp', 'aspx', 'gif', 'html', 'ico', 'jpg', 'jpeg', 'js',
                 'json', 'php', 'png', 'rar', 'txt', 'yaml', 'yml', 'zip')
# Default outbound HTTP headers; merged into every request we send.
HEADERS = {
    'User-Agent': 'Bridgy Fed (https://fed.brid.gy/)',
}
# Standard XML declaration prefix for UTF-8 documents.
XML_UTF8 = "<?xml version='1.0' encoding='UTF-8'?>\n"
# Parses an HTTP Link header of the form: <url>; rel="value"
LINK_HEADER_RE = re.compile(r""" *< *([^ >]+) *> *; *rel=['"]([^'"]+)['"] *""")
# ActivityStreams 2 "public" audience collection id.
AS2_PUBLIC_AUDIENCE = 'https://www.w3.org/ns/activitystreams#Public'

# Content-Type values. All non-unicode strings because App Engine's wsgi.py
# requires header values to be str, not unicode.
#
# ActivityPub Content-Type details:
# https://www.w3.org/TR/activitypub/#retrieving-objects
CONTENT_TYPE_AS2_LD = 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"'
CONTENT_TYPE_AS2 = 'application/activity+json'
CONTENT_TYPE_AS1 = 'application/stream+json'
CONTENT_TYPE_HTML = 'text/html; charset=utf-8'
CONTENT_TYPE_ATOM = 'application/atom+xml'
CONTENT_TYPE_MAGIC_ENVELOPE = 'application/magic-envelope+xml'

# Accept headers for HTTP content negotiation: AS2 only, and AS2 with an
# HTML fallback at lower priority.
CONNEG_HEADERS_AS2 = {
    'Accept': '%s; q=0.9, %s; q=0.8' % (CONTENT_TYPE_AS2, CONTENT_TYPE_AS2_LD),
}
CONNEG_HEADERS_AS2_HTML = {
    'Accept': CONNEG_HEADERS_AS2['Accept'] + ', %s; q=0.7' % CONTENT_TYPE_HTML,
}

# AS1 verbs we know how to translate into webmentions; anything else is
# rejected (see send_webmentions).
SUPPORTED_VERBS = (
    'checkin',
    'create',
    'follow',
    'like',
    'post',
    'share',
    'tag',
    'update',
)

# Domains this app is served on. PRIMARY_DOMAIN is canonical; the others
# are the App Engine default domain and local development.
PRIMARY_DOMAIN = 'fed.brid.gy'
OTHER_DOMAINS = (
    'bridgy-federated.appspot.com',
    'localhost',
)
DOMAINS = (PRIMARY_DOMAIN,) + OTHER_DOMAINS
|
2017-10-24 04:49:43 +00:00
|
|
|
|
2017-08-15 06:07:24 +00:00
|
|
|
|
|
|
|
def requests_get(url, **kwargs):
    """HTTP GET via util.requests_get with our shared defaults (see _requests_fn)."""
    return _requests_fn(util.requests_get, url, **kwargs)
|
2017-08-15 06:07:24 +00:00
|
|
|
|
|
|
|
|
2017-08-15 14:39:22 +00:00
|
|
|
def requests_post(url, **kwargs):
    """HTTP POST via util.requests_post with our shared defaults (see _requests_fn)."""
    return _requests_fn(util.requests_post, url, **kwargs)
|
2017-08-15 06:07:24 +00:00
|
|
|
|
|
|
|
|
2017-10-20 14:49:25 +00:00
|
|
|
def _requests_fn(fn, url, parse_json=False, **kwargs):
    """Wraps requests.* and adds raise_for_status() and User-Agent.

    Args:
      fn: util.requests_get or util.requests_post
      url: string
      parse_json: boolean, whether to return resp.json() instead of resp

    Returns:
      :class:`requests.Response`, or decoded JSON if parse_json is True

    Raises:
      :class:`werkzeug.exceptions.BadGateway` if parse_json is True and the
      response body isn't valid JSON
    """
    headers = kwargs.setdefault('headers', {})
    headers.update(HEADERS)

    # gateway=True makes the util wrapper translate HTTP errors for us
    resp = fn(url, gateway=True, **kwargs)
    logging.info('Got %s headers:%s', resp.status_code, resp.headers)

    # log textual and JSON response bodies for debugging, but skip HTML
    # since those bodies tend to be big and uninteresting
    mime = content_type(resp)
    if mime and mime != 'text/html':
        if mime.startswith('text/') or mime.endswith('+json') or mime.endswith('/json'):
            logging.info(resp.text)

    if not parse_json:
        return resp

    try:
        return resp.json()
    except ValueError:
        msg = "Couldn't parse response as JSON"
        logging.info(msg, exc_info=True)
        raise BadGateway(msg)
|
2017-08-23 15:14:51 +00:00
|
|
|
|
|
|
|
|
2017-10-20 14:00:42 +00:00
|
|
|
def get_as2(url):
    """Tries to fetch the given URL as ActivityStreams 2.

    Uses HTTP content negotiation via the Content-Type header. If the url is
    HTML and it has a rel-alternate link with an AS2 content type, fetches and
    returns that URL.

    Args:
      url: string

    Returns:
      :class:`requests.Response`

    Raises:
      :class:`requests.HTTPError`, :class:`werkzeug.exceptions.HTTPException`

      If we raise a werkzeug HTTPException, it will have an additional
      requests_response attribute with the last requests.Response we received.
    """
    as2_types = (CONTENT_TYPE_AS2, CONTENT_TYPE_AS2_LD)

    def _error(resp):
        msg = "Couldn't fetch %s as ActivityStreams 2" % url
        logging.warning(msg)
        err = BadGateway(msg)
        err.requests_response = resp
        raise err

    resp = requests_get(url, headers=CONNEG_HEADERS_AS2_HTML)
    if content_type(resp) in as2_types:
        return resp

    # got HTML (or something else); look for a rel-alternate/self link
    # advertising an AS2 representation and follow it
    parsed = util.parse_html(resp)
    link = parsed.find('link', rel=('alternate', 'self'), type=as2_types)
    if not (link and link['href']):
        _error(resp)

    resp = requests_get(urllib.parse.urljoin(resp.url, link['href']),
                        headers=CONNEG_HEADERS_AS2)
    if content_type(resp) in as2_types:
        return resp

    _error(resp)
|
2017-10-20 14:00:42 +00:00
|
|
|
|
|
|
|
|
2017-10-21 03:35:07 +00:00
|
|
|
def content_type(resp):
    """Returns a :class:`requests.Response`'s Content-Type, without charset suffix.

    Returns None if the response has no Content-Type header.
    """
    header = resp.headers.get('Content-Type')
    if not header:
        return None
    # drop any parameters, e.g. '; charset=utf-8'
    return header.split(';')[0]
|
|
|
|
|
|
|
|
|
2021-07-09 05:50:33 +00:00
|
|
|
def send_webmentions(activity_wrapped, proxy=None, **response_props):
    """Sends webmentions for an incoming Salmon slap or ActivityPub inbox delivery.

    Args:
      activity_wrapped: dict, AS1 activity, still containing our /r/
        redirect-wrapped URLs
      proxy: boolean, whether to always use the Response's proxy URL as the
        webmention source
      response_props: passed through to the newly created Responses

    Raises:
      HTTPException via error() if the verb is unsupported, no source or
      targets can be found, or any webmention delivery fails
    """
    # unwrap our /r/ redirect URLs back to their originals
    activity = redirect_unwrap(activity_wrapped)

    verb = activity.get('verb')
    if verb and verb not in SUPPORTED_VERBS:
        error(f'{verb} activities are not supported yet.')

    # extract source and targets
    source = activity.get('url') or activity.get('id')
    obj = activity.get('object')
    obj_url = util.get_url(obj)

    targets = util.get_list(activity, 'inReplyTo')
    if isinstance(obj, dict):
        if not source or verb in ('create', 'post', 'update'):
            # for creates/posts/updates the inner object is the real source
            source = obj_url or obj.get('id')
        targets.extend(util.get_list(obj, 'inReplyTo'))

    # mention tags that point back at our domain are also webmention targets.
    # use the *wrapped* activity here since those tag URLs are on our host.
    tags = util.get_list(activity_wrapped, 'tags')
    obj_wrapped = activity_wrapped.get('object')
    if isinstance(obj_wrapped, dict):
        tags.extend(util.get_list(obj_wrapped, 'tags'))
    for tag in tags:
        if tag.get('objectType') == 'mention':
            url = tag.get('url')
            if url and url.startswith(request.host_url):
                targets.append(redirect_unwrap(url))

    if verb in ('follow', 'like', 'share'):
        # the followed/liked/shared object is the target
        targets.append(obj_url)

    targets = util.dedupe_urls(util.get_url(t) for t in targets)
    if not source:
        error("Couldn't find original post URL")
    if not targets:
        error("Couldn't find any target URLs in inReplyTo, object, or mention tags")

    # send webmentions and store Responses
    errors = []  # stores (code, body) tuples
    for target in targets:
        if util.domain_from_link(target) == util.domain_from_link(source):
            logging.info('Skipping same-domain webmention from %s to %s',
                         source, target)
            continue

        response = Response(source=source, target=target, direction='in',
                            **response_props)
        response.put()
        # follows/likes/shares, and callers that pass proxy=True, use our
        # proxy URL as the webmention source instead of the original
        wm_source = (response.proxy_url()
                     if verb in ('follow', 'like', 'share') or proxy
                     else source)
        logging.info('Sending webmention from %s to %s', wm_source, target)

        try:
            endpoint = webmention.discover(target, headers=HEADERS).endpoint
            if endpoint:
                webmention.send(endpoint, wm_source, target, headers=HEADERS)
                response.status = 'complete'
                logging.info('Success!')
        except BaseException as e:
            # collect failures so one bad target doesn't stop the rest
            errors.append(util.interpret_http_exception(e))
        response.put()

    if errors:
        msg = 'Errors: ' + ', '.join(f'{code} {body}' for code, body in errors)
        # report the first error's status code, defaulting to 502
        error(msg, status=int(errors[0][0] or 502))
|
2020-01-31 15:38:58 +00:00
|
|
|
|
|
|
|
|
2021-07-08 04:02:13 +00:00
|
|
|
def postprocess_as2(activity, target=None, key=None):
    """Prepare an AS2 object to be served or sent via ActivityPub.

    Mutates activity in place and also returns it (Person objects are
    returned directly; Articles/Notes come back wrapped in a Create).

    Args:
      activity: dict, AS2 object or activity
      target: dict, AS2 object, optional. The target of activity's inReplyTo or
        Like/Announce/etc object, if any.
      key: :class:`models.MagicKey`, optional. populated into publicKey field
        if provided.
    """
    type = activity.get('type')

    # actor objects
    if type == 'Person':
        postprocess_as2_actor(activity)
        if not activity.get('publicKey'):
            # underspecified, inferred from this issue and Mastodon's implementation:
            # https://github.com/w3c/activitypub/issues/203#issuecomment-297553229
            # https://github.com/tootsuite/mastodon/blob/bc2c263504e584e154384ecc2d804aeb1afb1ba3/app/services/activitypub/process_account_service.rb#L77
            activity.update({
                'publicKey': {
                    'id': activity.get('preferredUsername'),
                    'publicKeyPem': key.public_pem().decode(),
                },
                '@context': (util.get_list(activity, '@context') +
                             ['https://w3id.org/security/v1']),
            })
        return activity

    # normalize any embedded actor objects too
    for actor in (util.get_list(activity, 'attributedTo') +
                  util.get_list(activity, 'actor')):
        postprocess_as2_actor(actor)

    # inReplyTo: singly valued, prefer id over url
    target_id = target.get('id') if target else None
    in_reply_to = activity.get('inReplyTo')
    if in_reply_to:
        if target_id:
            activity['inReplyTo'] = target_id
        elif isinstance(in_reply_to, list):
            if len(in_reply_to) > 1:
                logging.warning(
                    "AS2 doesn't support multiple inReplyTo URLs! "
                    'Only using the first: %s' % in_reply_to[0])
            activity['inReplyTo'] = in_reply_to[0]

    # Mastodon evidently requires a Mention tag for replies to generate a
    # notification to the original post's author. not required for likes,
    # reposts, etc. details:
    # https://github.com/snarfed/bridgy-fed/issues/34
    if target:
        for to in (util.get_list(target, 'attributedTo') +
                   util.get_list(target, 'actor')):
            if isinstance(to, dict):
                to = to.get('url') or to.get('id')
            if to:
                activity.setdefault('tag', []).append({
                    'type': 'Mention',
                    'href': to,
                })

    # activity objects (for Like, Announce, etc): prefer id over url
    obj = activity.get('object')
    if obj:
        if isinstance(obj, dict) and not obj.get('id'):
            obj['id'] = target_id or obj.get('url')
        elif target_id and obj != target_id:
            activity['object'] = target_id

    # id is required for most things. default to url if it's not set.
    if not activity.get('id'):
        activity['id'] = activity.get('url')

    # TODO: find a better way to check this, sometimes or always?
    # removed for now since it fires on posts without u-id or u-url, eg
    # https://chrisbeckstrom.com/2018/12/27/32551/
    # assert activity.get('id') or (isinstance(obj, dict) and obj.get('id'))

    # route id and url through our /r/ redirect wrapper
    activity['id'] = redirect_wrap(activity.get('id'))
    activity['url'] = redirect_wrap(activity.get('url'))

    # copy image(s) into attachment(s). may be Mastodon-specific.
    # https://github.com/snarfed/bridgy-fed/issues/33#issuecomment-440965618
    obj_or_activity = obj if isinstance(obj, dict) else activity
    obj_or_activity.setdefault('attachment', []).extend(
        obj_or_activity.get('image', []))

    # cc public and target's author(s) and recipients
    # https://www.w3.org/TR/activitystreams-vocabulary/#audienceTargeting
    # https://w3c.github.io/activitypub/#delivery
    if type in as2.TYPE_TO_VERB or type in ('Article', 'Note'):
        recips = [AS2_PUBLIC_AUDIENCE]
        if target:
            recips += itertools.chain(*(util.get_list(target, field) for field in
                                        ('actor', 'attributedTo', 'to', 'cc')))
        activity['cc'] = util.dedupe_urls(util.get_url(recip) or recip.get('id')
                                          for recip in recips)

    # wrap articles and notes in a Create activity
    if type in ('Article', 'Note'):
        activity = {
            '@context': as2.CONTEXT,
            'type': 'Create',
            'id': f'{activity["id"]}#bridgy-fed-create',
            'object': activity,
        }

    return util.trim_nulls(activity)
|
|
|
|
|
|
|
|
|
|
|
|
def postprocess_as2_actor(actor):
    """Prepare an AS2 actor object to be served or sent via ActivityPub.

    Mutates actor in place: fills in preferredUsername/id from its URL's
    domain and wraps its url in our redirect.

    Args:
      actor: dict, AS2 actor object
    """
    url = actor.get('url')
    if url:
        netloc = urllib.parse.urlparse(url).netloc
        actor.setdefault('preferredUsername', netloc)
        actor.update({
            'id': request.host_url + netloc,
            'url': redirect_wrap(url),
        })

    # required by pixelfed. https://github.com/snarfed/bridgy-fed/issues/39
    actor.setdefault('summary', '')
|
2019-01-04 15:04:45 +00:00
|
|
|
|
2018-10-14 14:58:17 +00:00
|
|
|
|
2021-07-08 04:02:13 +00:00
|
|
|
def redirect_wrap(url):
    """Returns a URL on our domain that redirects to this URL.

    ...to satisfy Mastodon's non-standard domain matching requirement. :(

    Args:
      url: string

    https://github.com/snarfed/bridgy-fed/issues/16#issuecomment-424799599
    https://github.com/tootsuite/mastodon/pull/6219#issuecomment-429142747

    Returns: string, redirect url
    """
    if not url:
        return url

    # already wrapped? leave it alone
    prefix = urllib.parse.urljoin(request.host_url, '/r/')
    return url if url.startswith(prefix) else prefix + url
|
2020-01-31 15:38:58 +00:00
|
|
|
|
|
|
|
|
2021-07-08 04:02:13 +00:00
|
|
|
def redirect_unwrap(val):
    """Removes our redirect wrapping from a URL, if it's there.

    val may be a string, dict, or list. dicts and lists are unwrapped
    recursively.

    Strings that aren't wrapped URLs are left unchanged.

    Args:
      val: string

    Returns: string, unwrapped url
    """
    if isinstance(val, dict):
        return {key: redirect_unwrap(value) for key, value in val.items()}

    if isinstance(val, list):
        return [redirect_unwrap(item) for item in val]

    if isinstance(val, str):
        prefix = urllib.parse.urljoin(request.host_url, '/r/')
        if val.startswith(prefix):
            # strip the /r/ prefix and resolve any redirects on the remainder
            return util.follow_redirects(val[len(prefix):]).url
        if val.startswith(request.host_url):
            # a bare path on our host is treated as a domain to resolve
            domain = util.domain_from_link(urllib.parse.urlparse(val).path.strip('/'))
            return util.follow_redirects(domain).url

    return val
|