kopia lustrzana https://gitlab.com/jaywink/federation
Porównaj commity
194 Commity
Autor | SHA1 | Data |
---|---|---|
Alain St-Denis | 1f15583aad | |
Alain St-Denis | 3dfe7d637b | |
Alain St-Denis | e992e2dc20 | |
Alain St-Denis | 2ee17e4aa6 | |
Jason Robinson | a109a7f824 | |
Jason Robinson | e7629c9a92 | |
Alain St-Denis | 0bc025b0d8 | |
Alain St-Denis | 8a4863fcd3 | |
Alain St-Denis | e476a0b8e0 | |
Alain St-Denis | 345a0c0ac3 | |
Alain St-Denis | 2672eede39 | |
Alain St-Denis | 21184c368a | |
Alain St-Denis | d080bcf509 | |
Alain St-Denis | 2509692041 | |
Alain St-Denis | f1bb3544fa | |
Alain St-Denis | 47bf0f579d | |
Alain St-Denis | 3f98f1e04e | |
Alain St-Denis | b190626bb9 | |
Alain St-Denis | 4868291747 | |
Alain St-Denis | 64044e7452 | |
Jason Robinson | 31583d29b8 | |
Jason Robinson | 6bb6df6b1d | |
Alain St-Denis | 3e4f4641c8 | |
Alain St-Denis | 64590ee4c3 | |
Jason Robinson | e0592e17d0 | |
Alain St-Denis | d2e0a56c98 | |
Alain St-Denis | db8bc1527c | |
Alain St-Denis | 22d9d30869 | |
Alain St-Denis | 783b0ebc14 | |
Alain St-Denis | a21275bbef | |
Alain St-Denis | c8ff988d32 | |
Alain St-Denis | d4f207ba02 | |
Alain St-Denis | 340e5cafe7 | |
Alain St-Denis | ca3f927aa9 | |
Alain St-Denis | e0115f7966 | |
Alain St-Denis | aa351e27e2 | |
Alain St-Denis | add80e0f6c | |
Alain St-Denis | ada8c20d39 | |
Alain St-Denis | 5dac605c4b | |
Alain St-Denis | 5c168d6630 | |
Alain St-Denis | db87313535 | |
Alain St-Denis | 7559f16f4f | |
Alain St-Denis | 6fd445382d | |
Alain St-Denis | 54a8404c3d | |
Alain St-Denis | 091b156703 | |
Alain St-Denis | cb96d83793 | |
Alain St-Denis | d7e6a56eb6 | |
Alain St-Denis | d53db6299f | |
Alain St-Denis | c87e1c3dd7 | |
Alain St-Denis | b1bc8e7295 | |
Alain St-Denis | 4b5a886492 | |
Alain St-Denis | 33366802c4 | |
Alain St-Denis | 0783bf43aa | |
Alain St-Denis | 7d750d3365 | |
Alain St-Denis | d577e39777 | |
Alain St-Denis | 47af44582c | |
Alain St-Denis | 24f5bb21a9 | |
Alain St-Denis | 1f8d4ac93f | |
Alain St-Denis | 6d885a5c40 | |
Alain St-Denis | 4dca31b17f | |
Alain St-Denis | e0993a7f7f | |
Alain St-Denis | e94533b222 | |
Alain St-Denis | 33131bd9fe | |
Alain St-Denis | f72ecf459a | |
Alain St-Denis | 63a0e38ac9 | |
Alain St-Denis | 60694662a5 | |
Alain St-Denis | 942fa333af | |
Alain St-Denis | 1e5516ef60 | |
Alain St-Denis | 37a5ea1215 | |
Alain St-Denis | 8e50267305 | |
Alain St-Denis | 5ad5212332 | |
Alain St-Denis | b3d5e8629c | |
Alain St-Denis | 24dcbb3d51 | |
Alain St-Denis | 6b9c74b793 | |
Alain St-Denis | a0c4e7fb6e | |
Alain St-Denis | 0c4f8218c7 | |
Alain St-Denis | 90db138f62 | |
Alain St-Denis | 59d5e99d23 | |
Alain St-Denis | e7d954b788 | |
Alain St-Denis | 26f93ec1be | |
Alain St-Denis | 2f4a3e9c16 | |
Alain St-Denis | 41785c2fdc | |
Jason Robinson | 05105e5c46 | |
Jason Robinson | e3474ce284 | |
Jason Robinson | 73b21a688b | |
Jason Robinson | b8a6270241 | |
Jason Robinson | f76f862ddb | |
Alain St-Denis | 913bc300df | |
Alain St-Denis | 0d42bb7018 | |
Alain St-Denis | f787c2f998 | |
Alain St-Denis | 3e0259a35e | |
Alain St-Denis | d27f38dee5 | |
Alain St-Denis | 4d923fc0b4 | |
Alain St-Denis | 9b10ef6779 | |
Alain St-Denis | 012db475e1 | |
Alain St-Denis | 49b29f6ab4 | |
Alain St-Denis | 38fd38101f | |
Alain St-Denis | 6dc2553197 | |
Alain St-Denis | 81c5356c41 | |
Alain St-Denis | bf69c74a63 | |
Alain St-Denis | 7bcb1694d4 | |
Alain St-Denis | c574f69adc | |
Alain St-Denis | 9f1cc9432f | |
Alain St-Denis | 8d8e10b7b3 | |
Alain St-Denis | 1ca67c987e | |
Alain St-Denis | 654a8e4396 | |
Alain St-Denis | 3798d9ddb9 | |
Alain St-Denis | 0e9a74e088 | |
Alain St-Denis | 492601c32d | |
Alain St-Denis | f84b83cb5c | |
Alain St-Denis | 0688e83240 | |
Alain St-Denis | db0b545f24 | |
Alain St-Denis | daa742124d | |
Alain St-Denis | 31e8c9d4c7 | |
Alain St-Denis | 342a239148 | |
Jason Robinson | 8515d538e8 | |
Jason Robinson | 449ea99e97 | |
Jason Robinson | 1603fd7bf7 | |
Jason Robinson | ae720f66b2 | |
Jason Robinson | 288325857a | |
Alain St-Denis | 0051cee3eb | |
Alain St-Denis | 5b560efded | |
Alain St-Denis | b3c97fc701 | |
Alain St-Denis | 1c188fc878 | |
Alain St-Denis | f75dbebb95 | |
Alain St-Denis | 6339c58268 | |
Alain St-Denis | 420292679f | |
Alain St-Denis | 52c96532dc | |
Alain St-Denis | a4019d88f9 | |
Alain St-Denis | 9df803dafe | |
Alain St-Denis | bb6cc724f3 | |
Alain St-Denis | 58c8f95e54 | |
Jason Robinson | 662e2964b6 | |
Jason Robinson | 01357aacb0 | |
jaywink | 0291d9dd98 | |
Jason Robinson | 5a940e75f5 | |
Jason Robinson | dd102d4575 | |
Jason Robinson | 5c79a5d1cb | |
Jason Robinson | 2714f6a973 | |
Jason Robinson | b7e6181594 | |
jaywink | a64107da85 | |
Jason Robinson | d44e2e3c8f | |
Jason Robinson | 1d782f5f5a | |
Jason Robinson | a82f75822e | |
Jason Robinson | ca521d4a03 | |
Jason Robinson | a4228242de | |
Jason Robinson | 7973f3e87c | |
Jason Robinson | 448c8cd780 | |
Jason Robinson | fbaee66da8 | |
Jason Robinson | d5938186ac | |
Jason Robinson | 63b7970c8f | |
Jason Robinson | 9244e96682 | |
Jason Robinson | 3b8d90cf99 | |
jaywink | cc6dd1cc43 | |
Jason Robinson | 36443e85a2 | |
jaywink | 6b3e17083a | |
Jason Robinson | 2fe45a35f0 | |
Jason Robinson | e4db91503b | |
Jason Robinson | 82ac0ce3cf | |
Jason Robinson | 4566b252a1 | |
Jason Robinson | f512d8a9dc | |
Jason Robinson | 0bfadd059e | |
Jason Robinson | 33c6edac7f | |
Jason Robinson | e55f13a117 | |
Jason Robinson | 48373ad615 | |
Jason Robinson | ccd975ce46 | |
Jason Robinson | 8797a08c02 | |
Jason Robinson | c40194da95 | |
Jason Robinson | 0abb33a4fb | |
Jason Robinson | 49e5f0a511 | |
Jason Robinson | 7608047b9e | |
jaywink | 7ac452e65c | |
Jason Robinson | 0d92387339 | |
Jason Robinson | 7bd114ac59 | |
Jason Robinson | 51c46364f3 | |
Jason Robinson | 3dd9921899 | |
Jason Robinson | 602c576aec | |
Jason Robinson | 6d1304ab68 | |
Jason Robinson | dc2102a7b8 | |
Jason Robinson | c469f74e29 | |
Jason Robinson | d981cb7811 | |
Jason Robinson | 499d222719 | |
Jason Robinson | dfc7264465 | |
Jason Robinson | f9d03fd916 | |
Jason Robinson | 82f3aed21b | |
Jason Robinson | d52c21e8bc | |
Jason Robinson | 524e5efbfd | |
Jason Robinson | 449089a59e | |
Jason Robinson | 01dac38921 | |
Jason Robinson | aeb1b37e2c | |
Jason Robinson | 1b461806f0 | |
Jason Robinson | 48be2cbb6e | |
Jason Robinson | 34d04f01f1 | |
Jason Robinson | 153f779fa5 |
|
@ -3,6 +3,7 @@ __pycache__
|
|||
.cache
|
||||
.pytest_cache/
|
||||
|
||||
http_cache.sqlite
|
||||
database.sqlite
|
||||
|
||||
# C extensions
|
||||
|
|
|
@ -1,30 +0,0 @@
|
|||
# This file is a template, and might need editing before it works on your project.
|
||||
# Official language image. Look for the different tagged releases at:
|
||||
# https://hub.docker.com/r/library/python/tags/
|
||||
image: python:3.8
|
||||
|
||||
# Change pip's cache directory to be inside the project directory since we can
|
||||
# only cache local items.
|
||||
variables:
|
||||
PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache"
|
||||
|
||||
# Pip's cache doesn't store the python packages
|
||||
# https://pip.pypa.io/en/stable/reference/pip_install/#caching
|
||||
#
|
||||
# If you want to also cache the installed packages, you have to install
|
||||
# them in a virtualenv and cache it as well.
|
||||
cache:
|
||||
paths:
|
||||
- .cache/pip
|
||||
- venv/
|
||||
|
||||
before_script:
|
||||
- python -V
|
||||
- pip install virtualenv
|
||||
- virtualenv venv
|
||||
- source venv/bin/activate
|
||||
|
||||
test:
|
||||
script:
|
||||
- pip install tox
|
||||
- tox
|
|
@ -0,0 +1,22 @@
|
|||
# .readthedocs.yaml
|
||||
# Read the Docs configuration file
|
||||
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
|
||||
|
||||
# Required
|
||||
version: 2
|
||||
|
||||
# Set the version of Python and other tools you might need
|
||||
build:
|
||||
os: ubuntu-22.04
|
||||
tools:
|
||||
python: "3.9"
|
||||
|
||||
# Build documentation in the docs/ directory with Sphinx
|
||||
sphinx:
|
||||
configuration: docs/conf.py
|
||||
|
||||
formats: all
|
||||
|
||||
python:
|
||||
install:
|
||||
- requirements: dev-requirements.txt
|
|
@ -0,0 +1,10 @@
|
|||
steps:
|
||||
test:
|
||||
image: python:3.10
|
||||
commands:
|
||||
- python -V
|
||||
- pip install virtualenv
|
||||
- virtualenv venv
|
||||
- . venv/bin/activate
|
||||
- pip install tox
|
||||
- tox
|
309
CHANGELOG.md
309
CHANGELOG.md
|
@ -1,5 +1,226 @@
|
|||
# Changelog
|
||||
|
||||
## Unreleased
|
||||
|
||||
### Changed
|
||||
|
||||
* This is actually both a change and a fix. AP Image objects do not define properties matching the
|
||||
HTML img tag alt and title properties. Image.name is used to render both alt and title, which IMHO is
|
||||
wrong. With this change, markdown images defining the title property will be recognized instead of
|
||||
being thrown away (the fix) and the title property, if defined, will have precedence over the
|
||||
alt property as the Image.name value (the change). Before this change, the client app would properly
|
||||
render the img tag from the markdown source (with distinct alt and title properties), but the Image
|
||||
object would not federate and hence not be displayed on other platforms (namely Mastodon).
|
||||
|
||||
### Fixed
|
||||
|
||||
* Note._find_and_mark_mentions: When an AP Mention object href can't be found in the rendered content,
|
||||
try the name property.
|
||||
|
||||
* Ignore media objects that don't define a media type.
|
||||
|
||||
* Prevent rendered content image duplication when an image is both in the AP payload rendered content
|
||||
and defined as an attachment that doesn't set the inlineImage property.
|
||||
|
||||
* Instead of discarding the whole AP payload when encountering an undefined or unlisted AP object,
|
||||
log a warning and keep going. Ensure None is returned when a nested field only contains an undefined
|
||||
object.
|
||||
|
||||
* Accept the application/ld+json type for webfinger AP links.
|
||||
|
||||
* Mark an AP mention only if profile.finger is defined.
|
||||
|
||||
* Handle escape sequences for inbound markdown mentions.
|
||||
|
||||
* Extend the Unicode character range allowed in markdown mentions.
|
||||
|
||||
* Discard illegal characters from tag text. Previously, this was done only on tag links.
|
||||
|
||||
## [0.25.1] - 2024-02-18
|
||||
|
||||
### Fixed
|
||||
|
||||
* Address CVE-2024-23832 by ensuring that a pulled AP payload id netloc is the same as the request fid netloc.
|
||||
|
||||
## [0.25.0] - 2024-01-06
|
||||
|
||||
### Added
|
||||
|
||||
* LD signature. Relayable AP payloads signatures are checked (inbound) and signed (outbound). A missing
|
||||
or invalid signature on inbound payloads will trigger a fetch if the sender differs from the author
|
||||
(i.e., a relay).
|
||||
|
||||
* The `signable` attribute has been added. It defaults to `False` and will enforce the fetching of relayed
|
||||
payloads with a bad signature when set to `True` on a given class.
|
||||
|
||||
* The `url` property is now set to the `id` property as some platforms make use of it.
|
||||
|
||||
### Changed
|
||||
|
||||
* Re-implement dynamically generated LD contexts for outbound payloads. AP extensions are defined on a
|
||||
per class/property basis. For classes, a `ctx` attribute is set if required. For properties, the calamus
|
||||
field `metadata` property is used.
|
||||
|
||||
* For inbound payload, a cached dict of all the defined AP extensions is merged with each incoming LD context.
|
||||
|
||||
* Better handle conflicting property defaults by having `get_base_attributes` return only attributes that
|
||||
are not empty (or bool). This helps distinguish between `marshmallow.missing` and empty values.
|
||||
|
||||
* JsonLD document caching now set in `activitypub/__init__.py`.
|
||||
|
||||
* Patch outbound payloads for platforms that don't handle arrays compacted to a single value and
|
||||
`as:Public`.
|
||||
|
||||
* Always try to get profiles from the client app before fetching from remote. In support of this, the client
|
||||
app AP profiles must include the keyId and the followers URIs. As a significant side effect, profile retractions
|
||||
are now more likely to succeed.
|
||||
|
||||
* Switch to BeautifulSoup for content parsing. The client app is now expected to provide the
|
||||
rendered content for outbound payloads. Mark inbound AP payload hashtag and mention links
|
||||
and let the client app deal with them.
|
||||
|
||||
* Move process_text_links back to the client app.
|
||||
|
||||
* Handle gotosocial reply collections.
|
||||
|
||||
### Fixed
|
||||
|
||||
* Inbound AP share retractions (undo announce) were deserialized as a `base.Retraction` class, which would
|
||||
throw an error when accessing the missing `signable` attribute. To fix this, a `Retraction` class was added.
|
||||
|
||||
* Because of the additions and changes above, a number of tests needed to be fixed.
|
||||
|
||||
* HTTP signature verification now returns the signature author fid which is used as the actual
|
||||
sender by `message_to_object`.
|
||||
|
||||
* In fetch_document: if response.encoding is not set, default to utf-8.
|
||||
|
||||
* Fix process_text_links that would crash on `a` tags with no `href` attribute.
|
||||
|
||||
* Ignore relayed AP retractions.
|
||||
|
||||
* Fix AP profile processing for hubzilla, guppe and bird.makeup.
|
||||
|
||||
* Unquote and normalize hashtag links.
|
||||
|
||||
* Fix Peertube payload processing when the content property is missing.
|
||||
|
||||
* Ensure the outbound AP profile to property is an array.
|
||||
|
||||
## [0.24.1] - 2023-03-18
|
||||
|
||||
### Fixed
|
||||
|
||||
* Fix documentation builds
|
||||
|
||||
## [0.24.0] - 2023-03-18
|
||||
|
||||
### Added
|
||||
|
||||
* Add a validation function for the Activitypub `attributedTo` property. Ensure it starts with `http`.
|
||||
|
||||
### Changed
|
||||
|
||||
* Optimize handle_send by ensuring a payload is only sent once per recipient unique endpoint.
|
||||
|
||||
* Match the Activitypub Hashtag object `href` property value against the raw content in order to make
|
||||
this process platform agnostic.
|
||||
|
||||
### Fixed
|
||||
|
||||
* The Activitypub `url` property can now handle nested Link objects for all defined object types.
|
||||
|
||||
* Catch cases where an Activitypub CollectionPage `next` property points back to a Collection object.
|
||||
|
||||
* Make the Activitypub Follow class handle both the Undo and the Accept activities.
|
||||
|
||||
## [0.23.1] - 2023-02-08
|
||||
|
||||
### Changed
|
||||
|
||||
* Switch `python-httpsig-socialhome` dependency to PyPi packaged version.
|
||||
|
||||
## [0.23.0] - 2023-02-08
|
||||
|
||||
### Added
|
||||
|
||||
* Activitypub payloads are now processed by calamus (https://github.com/SwissDataScienceCenter/calamus),
|
||||
which is a jsonld processor based on marshmallow.
|
||||
|
||||
* A large number of inbound Activitypub objects and properties are deserialized, it's up to the client
|
||||
app to implement the corresponding behavior.
|
||||
|
||||
* Unsupported objects and properties should be easy to implement. Unsupported payloads are logged as such.
|
||||
|
||||
* More AP platforms are now supported (friendica, pixelfed, misskey, pleroma, gotosocial, litepub, and more).
|
||||
The jsonld context some platforms provide sometimes needs to be patched because of missing jsonld term definitions.
|
||||
|
||||
* Peertube Video objects are translated into Posts.
|
||||
|
||||
* For performance, requests_cache has been added. It pulls a redis configuration from django if one exists or
|
||||
falls back to a sqlite backend. Special case: pyld document loader has been extended to use redis directly.
|
||||
|
||||
* Activitypub GET requests are now signed if the django configuration includes FEDERATION_USER which is used to fetch that
|
||||
user's private key.
|
||||
|
||||
* Activitypub remote GET signature is now verified in order to authorize remote access to limited content.
|
||||
|
||||
* Added Video and Audio objects. Inbound support only.
|
||||
|
||||
* Process Activitypub reply collections. When supported by the client app, it allows for a more complete view of
|
||||
conversations, especially for shared content.
|
||||
|
||||
* WIP: initial support for providing responses to Activitypub collections requests. This release
|
||||
only responds with a count for the followers and following collections.
|
||||
|
||||
### Changed
|
||||
|
||||
* outbound.py doesn't need to set the to and cc Activitypub properties, they are now expected to be set by
|
||||
the client app.
|
||||
|
||||
* Attempts are made to remove duplicate img tags some platforms send (friendica, for one).
|
||||
|
||||
* Activitypub receivers of the followers variant are now correctly processed for all known platforms.
|
||||
|
||||
* Accept images with application/octet-stream content type (with the help of the magic library).
|
||||
|
||||
* user@domain is now the only format used for mentions. The client app is expected to comply. For
|
||||
Activitypub, this means making a webfinger request to validate the handle if the client app doesn't
|
||||
already know the corresponding profile.
|
||||
|
||||
* Because of the change above, ensure mentions in Diaspora outbound payloads are as per their protocol
|
||||
spec (i.e. replacing @user@domain with @{user@domain} in the text)
|
||||
|
||||
### Fixed
|
||||
|
||||
* Signatures are not verified and the corresponding payload is dropped if no public key is found.
|
||||
|
||||
* Sign forwarded AP replies and shares with the target content author's private key.
|
||||
|
||||
### Internal changes
|
||||
|
||||
* Dropped python 3.6 support.
|
||||
|
||||
* Many tests were fixed/updated.
|
||||
|
||||
## [0.22.0] - 2021-08-15
|
||||
|
||||
### Added
|
||||
|
||||
* Work in progress Matrix support over an appservice 😻
|
||||
|
||||
Currently requires Django support. Tested on Dendrite and up to version v0.3.11 only. Features so far:
|
||||
|
||||
* Register local users on the configured Matrix server.
|
||||
* Post local user public posts into Matrix side to their profile timeline rooms and to each hashtag room.
|
||||
|
||||
### Fixed
|
||||
|
||||
* Fixed image delivery between platforms that send ActivityPub payloads with a markdown `source`,
|
||||
caused by overenthusiastic linkifying of markdown.
|
||||
|
||||
* Fix a crash in `outbound.handle_send` when payload failed to be generated and `parent_user` was not given.
|
||||
|
||||
## [0.21.0] - 2020-12-20
|
||||
|
||||
### Added
|
||||
|
@ -23,7 +244,7 @@
|
|||
|
||||
If Django is configured, a profile will be retrieved using the configured profile
|
||||
getter function and the profile name or username will be used for the link.
|
||||
|
||||
|
||||
* Add `process_text_links` text utility to linkify URL's in text.
|
||||
|
||||
* Add `find_tags` text utility to find hashtags from text. Optionally the function can
|
||||
|
@ -35,15 +256,15 @@
|
|||
* `str` or `dict` payload
|
||||
* `str` protocol name
|
||||
* `str` sender id
|
||||
|
||||
|
||||
The function will be called for each generated payload.
|
||||
|
||||
* Cross-protocol improvements:
|
||||
* Cross-protocol improvements:
|
||||
* Extract Diaspora guid from ActivityPub payloads implementing the Diaspora extension.
|
||||
* Add Diaspora extension and guid to outbound ActivityPub payloads, if available. For
|
||||
profiles, also add handle.
|
||||
* Extract ActivityPub ID from Diaspora payloads if found as the `activitypub_id` property.
|
||||
* Add ActivityPub ID to outbound Diaspora payloads of types comment, post and profile,
|
||||
* Add ActivityPub ID to outbound Diaspora payloads of types comment, post and profile,
|
||||
if a URL is given as `id`.
|
||||
|
||||
### Changed
|
||||
|
@ -52,7 +273,7 @@
|
|||
|
||||
* URL's in outgoing text content are now linkified for the HTML representation
|
||||
of the content for ActivityPub payloads.
|
||||
|
||||
|
||||
* Don't include OStatus for Mastodon 3.0+ protocols list. ([related issue](https://github.com/thefederationinfo/the-federation.info/issues/217))
|
||||
|
||||
* **Backwards incompatible**: Stop markdownifying incoming ActivityPub content. Instead
|
||||
|
@ -66,27 +287,27 @@
|
|||
* Add missing `response.raise_for_status()` call to the `fetch_document` network helper
|
||||
when fetching with given URL. Error status was already being raised correctly when
|
||||
fetching by domain and path.
|
||||
|
||||
|
||||
* Don't crash when parsing an invalid NodeInfo document where the usage dictionary
|
||||
is not following specification.
|
||||
|
||||
|
||||
* Ensure Pixelfed, Kroeg and Kibou instances that emulate the Mastodon API don't get identified as Mastodon instances.
|
||||
|
||||
* Loosen validation of `TargetIDMixin`, it now requires one of the target attributes
|
||||
to be set, not just `target_id`. This fixes follows over the Diaspora protocol which
|
||||
broke with stricter send validation added in 0.19.0.
|
||||
|
||||
|
||||
* Fix some edge case crashes of `handle_send` when there are Diaspora protocol receivers.
|
||||
|
||||
* Fix reading `sharedInbox` from remote ActivityPub profiles. This caused public payloads not
|
||||
to be deduplicated when sending public payloads to remote ActivityPub servers. Refetching
|
||||
profiles should now fix this. ([related issue](https://git.feneas.org/jaywink/federation/issues/124))
|
||||
profiles should now fix this. ([related issue](https://git.feneas.org/jaywink/federation/issues/124))
|
||||
|
||||
* Don't always crash generating payloads if Django is installed but not configured.
|
||||
|
||||
* Don't try to relay AP payloads to Diaspora receivers and vice versa, for now, until cross-protocol
|
||||
relaying is supported.
|
||||
|
||||
|
||||
* Fix some characters stopping tags being identified ([related issue](https://git.feneas.org/socialhome/socialhome/-/issues/222))
|
||||
|
||||
* Fix tags separated by slashes being identified ([related issue](https://git.feneas.org/socialhome/socialhome/-/issues/198))
|
||||
|
@ -102,7 +323,7 @@
|
|||
* All outgoing entities are now validated before sending. This stops the sending of invalid
|
||||
entities to the network, for example a Share of a Post from ActivityPub to the Diaspora
|
||||
protocol network.
|
||||
|
||||
|
||||
### Fixed
|
||||
|
||||
* Allow ActivityPub HTTP Signature verification to pass if signature is at most 24 hours old.
|
||||
|
@ -154,7 +375,7 @@
|
|||
* Entities with `raw_content` now also contain a `_media_type` and `rendered_content`.
|
||||
|
||||
The default `_media_type` is `text/markdown` except for ActivityPub originating posts it defaults to `text/html`. If the ActivityPub payload contains a `source`, that mediaType will be used instead.
|
||||
|
||||
|
||||
* Host meta fetchers now support NodeInfo 2.1
|
||||
|
||||
### Changed
|
||||
|
@ -172,15 +393,15 @@
|
|||
* The high level inbound and outbound functions `inbound.handle_receive`, `outbound.handle_send` parameter `user` must now receive a `UserType` compatible object. This must have the attribute `id`, and for `handle_send` also `private_key`. If Diaspora support is required then also `handle` and `guid` should exist. The type can be found as a class in `types.UserType`.
|
||||
* The high level inbound function `inbound.handle_receive` first parameter has been changed to `request` which must be a `RequestType` compatible object. This must have the attribute `body` which corresponds to the old `payload` parameter. For ActivityPub inbound requests the object must also contain `headers`, `method` and `url`.
|
||||
* The outbound function `outbound.handle_send` parameter `recipients` structure has changed. It must now be a list of dictionaries, containing at minimum the following: `endpoint` for the recipient endpoint, `fid` for the recipient federation ID (ActivityPub only), `protocol` for the protocol to use and `public` as a boolean whether the payload should be treated as visible to anyone.
|
||||
|
||||
|
||||
For Diaspora private deliveries, also a `public_key` is required containing the receiver public key. Note that passing in handles as recipients is not any more possible - always pass in a url for `endpoint`.
|
||||
* The outbound function `outbound.handle_create_payload` now requires an extra third parameter for the protocol to use. This function should rarely need to be called directly - use `handle_send` instead which can handle both ActivityPub and Diaspora protocols.
|
||||
* The `Image` base entity has been made more generic.
|
||||
|
||||
|
||||
The following were removed: `remote_path`, `remote_name`, `linked_type`, `linked_guid`, `public`.
|
||||
|
||||
|
||||
The following were added: `url`, `name`.
|
||||
|
||||
|
||||
* **Backwards incompatible.** Generator `RFC3033Webfinger` and the related `rfc3033_webfinger_view` have been renamed to `RFC7033Webfinger` and `rfc7033_webfinger_view` to reflect the right RFC number.
|
||||
|
||||
* Network helper utility `fetch_document` can now also take a dictionary of `headers`. They will be passed to the underlying `requests` method call as is.
|
||||
|
@ -220,7 +441,7 @@
|
|||
* Enable generating encrypted JSON payloads with the Diaspora protocol which adds private message support. ([related issue](https://github.com/jaywink/federation/issues/82))
|
||||
|
||||
JSON encrypted payload encryption and decryption is handled by the Diaspora `EncryptedPayload` class.
|
||||
|
||||
|
||||
* Add RFC7033 webfinger generator ([related issue](https://github.com/jaywink/federation/issues/108))
|
||||
|
||||
Also provided is a Django view and url configuration for easy addition into Django projects. Django is not a hard dependency of this library, usage of the Django view obviously requires installing Django itself. For configuration details see documentation.
|
||||
|
@ -232,33 +453,33 @@
|
|||
* Added new network utilities to fetch IP and country information from a host.
|
||||
|
||||
The country information is fetched using the free `ipdata.co` service. NOTE! This service is rate limited to 1500 requests per day.
|
||||
|
||||
|
||||
* Extract mentions from Diaspora payloads that have text content. The mentions will be available in the entity as `_mentions` which is a set of Diaspora ID's in URI format.
|
||||
|
||||
|
||||
### Changed
|
||||
|
||||
* Send outbound Diaspora payloads in new format. Remove possibility to generate legacy MagicEnvelope payloads. ([related issue](https://github.com/jaywink/federation/issues/82))
|
||||
|
||||
* **Backwards incompatible**. Refactor `handle_send` function
|
||||
|
||||
|
||||
Now handle_send high level outbound helper function also allows delivering private payloads using the Diaspora protocol. ([related issue](https://github.com/jaywink/federation/issues/82))
|
||||
|
||||
|
||||
The signature has changed. Parameter `recipients` should now be a list of recipients to deliver to. Each recipient should either be an `id` or a tuple of `(id, public key)`. If public key is provided, Diaspora protocol delivery will be made as an encrypted private delivery.
|
||||
|
||||
|
||||
* **Backwards incompatible**. Change `handle_create_payload` function signature.
|
||||
|
||||
Parameter `to_user` is now `to_user_key` and thus instead of an object containing the `key` attribute it should now be an RSA public key object instance. This simplifies things since we only need the key from the user, nothing else.
|
||||
|
||||
* Switch Diaspora protocol to send new style entities ([related issue](https://github.com/jaywink/federation/issues/59))
|
||||
|
||||
We've already accepted these on incoming payloads for a long time and so do all the other platforms now, so now we always send out entities with the new property names. This can break federation with really old servers that don't understand these keys yet.
|
||||
We've already accepted these on incoming payloads for a long time and so do all the other platforms now, so now we always send out entities with the new property names. This can break federation with really old servers that don't understand these keys yet.
|
||||
|
||||
### Fixed
|
||||
|
||||
* Change unquote method used when preparing Diaspora XML payloads for verification ([related issue](https://github.com/jaywink/federation/issues/115))
|
||||
|
||||
Some platforms deliver payloads not using the urlsafe base64 standard which caused problems when validating the unquoted signature. Ensure maximum compatibility by allowing non-standard urlsafe quoted payloads.
|
||||
|
||||
|
||||
* Fix for empty values in Diaspora protocol entities sometimes ending up as `None` instead of empty string when processing incoming payloads.
|
||||
|
||||
* Fix validation of `Retraction` with entity type `Share`
|
||||
|
@ -266,31 +487,31 @@
|
|||
* Allow port in Diaspora handles as per the protocol specification
|
||||
|
||||
Previously handles were validated like emails.
|
||||
|
||||
|
||||
* Fix Diaspora `Profile` mapping regarding `last_name` property
|
||||
|
||||
Previously only `first_name` was used when creating the `Profile.name` value. Now both `first_name` and `last_name` are used.
|
||||
|
||||
|
||||
When creating outgoing payloads, the `Profile.name` will still be placed in `first_name` to avoid trying to artificially split it.
|
||||
|
||||
|
||||
## [0.15.0] - 2018-02-12
|
||||
|
||||
### Added
|
||||
* Added base entity `Share` which maps to a `DiasporaReshare` for the Diaspora protocol. ([related issue](https://github.com/jaywink/federation/issues/94))
|
||||
|
||||
The `Share` entity supports all the properties that a Diaspora reshare does. Additionally two other properties are supported: `raw_content` and `entity_type`. The former can be used for a "quoted share" case where the sharer adds their own note to the share. The latter can be used to reference the type of object that was shared, to help the receiver, if it is not sharing a `Post` entity. The value must be a base entity class name.
|
||||
|
||||
|
||||
* Entities have two new properties: `id` and `target_id`.
|
||||
|
||||
Diaspora entity ID's are in the form of the [Diaspora URI scheme](https://diaspora.github.io/diaspora_federation/federation/diaspora_scheme.html), where it is possible to construct an ID from the entity. In the future, ActivityPub object ID's will be found in these properties.
|
||||
Diaspora entity ID's are in the form of the [Diaspora URI scheme](https://diaspora.github.io/diaspora_federation/federation/diaspora_scheme.html), where it is possible to construct an ID from the entity. In the future, ActivityPub object ID's will be found in these properties.
|
||||
|
||||
* New high level fetcher function `federation.fetchers.retrieve_remote_content`. ([related issue](https://github.com/jaywink/federation/issues/103))
|
||||
|
||||
This function takes the following parameters:
|
||||
|
||||
|
||||
* `id` - Object ID. For Diaspora, the only supported protocol at the moment, this is in the [Diaspora URI](https://diaspora.github.io/diaspora_federation/federation/diaspora_scheme.html) format.
|
||||
* `sender_key_fetcher` - Optional function that takes a profile `handle` and returns a public key in `str` format. If this is not given, the public key will be fetched from the remote profile over the network.
|
||||
|
||||
|
||||
The given ID will be fetched from the remote endpoint, validated to be from the correct author against their public key and then an instance of the entity class will be constructed and returned.
|
||||
|
||||
* New Diaspora protocol helpers in `federation.utils.diaspora`:
|
||||
|
@ -298,16 +519,16 @@
|
|||
* `retrieve_and_parse_content`. See notes regarding the high level fetcher above.
|
||||
* `fetch_public_key`. Given a `handle` as a parameter, will fetch the remote profile and return the `public_key` from it.
|
||||
* `parse_diaspora_uri`. Parses a Diaspora URI scheme string, returns either `None` if parsing fails or a `tuple` of `handle`, `entity_type` and `guid`.
|
||||
|
||||
|
||||
* Support fetching new style Diaspora protocol Webfinger (RFC 3033) ([related issue](https://github.com/jaywink/federation/issues/108))
|
||||
|
||||
The legacy Webfinger is still used as a fallback if the new Webfinger is not found.
|
||||
The legacy Webfinger is still used as a fallback if the new Webfinger is not found.
|
||||
|
||||
### Changed
|
||||
* Refactoring for Diaspora `MagicEnvelope` class.
|
||||
|
||||
The class init now also allows passing in parameters to construct and verify MagicEnvelope instances. The order of init parameters has not been changed, but they are now all optional. When creating a class instance, one should always pass in the necessary parameters depending on whether the class instance will be used for building a payload or verifying an incoming payload. See class docstring for details.
|
||||
|
||||
|
||||
* Diaspora protocol receive flow now uses the `MagicEnvelope` class to verify payloads. No functional changes regarding verification otherwise.
|
||||
|
||||
* Diaspora protocol receive flow now fetches the sender public key over the network if a `sender_key_fetcher` function is not passed in. Previously an error would be raised.
|
||||
|
@ -329,9 +550,9 @@
|
|||
## [0.14.0] - 2017-08-06
|
||||
|
||||
### Security
|
||||
* Add proper checks to make sure Diaspora protocol payload handle and entity handle are the same. Even though we already verified the signature of the sender, we didn't ensure that the sender isn't trying to fake an entity authored by someone else.
|
||||
* Add proper checks to make sure Diaspora protocol payload handle and entity handle are the same. Even though we already verified the signature of the sender, we didn't ensure that the sender isn't trying to fake an entity authored by someone else.
|
||||
|
||||
The Diaspora protocol functions `message_to_objects` and `element_to_objects` now require a new parameter, the payload sender handle. These functions should normally not be needed to be used directly.
|
||||
The Diaspora protocol functions `message_to_objects` and `element_to_objects` now require a new parameter, the payload sender handle. These functions should normally not be needed to be used directly.
|
||||
|
||||
### Changed
|
||||
* **Breaking change.** The high level `federation.outbound` functions `handle_send` and `handle_create_payload` signatures have been changed. This has been done to better represent the objects that are actually sent in and to add an optional `parent_user` object.
|
||||
|
@ -341,7 +562,7 @@
|
|||
## [0.13.0] - 2017-07-22
|
||||
|
||||
### Backwards incompatible changes
|
||||
* When processing Diaspora payloads, entity used to get a `_source_object` stored to it. This was an `etree.Element` created from the source object. Due to serialization issues in applications (for example pushing the object to a task queue or saving to database), `_source_object` is now a byte string representation for the element done with `etree.tostring()`.
|
||||
* When processing Diaspora payloads, entity used to get a `_source_object` stored to it. This was an `etree.Element` created from the source object. Due to serialization issues in applications (for example pushing the object to a task queue or saving to database), `_source_object` is now a byte string representation for the element done with `etree.tostring()`.
|
||||
|
||||
### Added
|
||||
* New style Diaspora private encrypted JSON payloads are now supported in the receiving side. Outbound private Diaspora payloads are still sent as legacy encrypted payloads. ([issue](https://github.com/jaywink/federation/issues/83))
|
||||
|
@ -358,7 +579,7 @@
|
|||
|
||||
### Removed
|
||||
* `Post.photos` entity attribute was never used by any code and has been removed. Child entities of type `Image` are stored in the `Post._children` as before.
|
||||
* Removed deprecated user private key lookup using `user.key` in Diaspora receive processing. Passed in `user` objects must now have a `private_key` attribute.
|
||||
* Removed deprecated user private key lookup using `user.key` in Diaspora receive processing. Passed in `user` objects must now have a `private_key` attribute.
|
||||
|
||||
## [0.12.0] - 2017-05-22
|
||||
|
||||
|
@ -380,9 +601,9 @@
|
|||
|
||||
Diaspora protocol support added for `comment` and `like` relayable types. On inbound payloads the signature included in the payload will be verified against the sender public key. A failed verification will raise `SignatureVerificationError`. For outbound entities, the author private key will be used to add a signature to the payload.
|
||||
|
||||
This introduces some backwards incompatible changes to the way entities are processed. Diaspora entity mappers `get_outbound_entity` and entity utilities `get_full_xml_representation` now requires the author `private_key` as a parameter. This is required to sign outgoing `Comment` and `Reaction` (like) entities.
|
||||
This introduces some backwards incompatible changes to the way entities are processed. Diaspora entity mappers `get_outbound_entity` and entity utilities `get_full_xml_representation` now requires the author `private_key` as a parameter. This is required to sign outgoing `Comment` and `Reaction` (like) entities.
|
||||
|
||||
Additionally, Diaspora entity mappers `message_to_objects` and `element_to_objects` now take an optional `sender_key_fetcher` parameter. This must be a function that when called with the sender handle will return the sender public key. This allows using locally cached public keys instead of fetching them as needed. NOTE! If the function is not given, each processed payload will fetch the public key over the network.
|
||||
Additionally, Diaspora entity mappers `message_to_objects` and `element_to_objects` now take an optional `sender_key_fetcher` parameter. This must be a function that when called with the sender handle will return the sender public key. This allows using locally cached public keys instead of fetching them as needed. NOTE! If the function is not given, each processed payload will fetch the public key over the network.
|
||||
|
||||
A failed payload signature verification now raises a `SignatureVerificationError` instead of a less specific `AssertionError`.
|
||||
|
||||
|
@ -403,7 +624,7 @@ A failed payload signature verification now raises a `SignatureVerificationError
|
|||
## [0.10.1] - 2017-03-09
|
||||
|
||||
### Fixes
|
||||
* Ensure tags are lower cased after collecting them from entity `raw_content`.
|
||||
* Ensure tags are lower cased after collecting them from entity `raw_content`.
|
||||
|
||||
## [0.10.0] - 2017-01-28
|
||||
|
||||
|
@ -448,7 +669,7 @@ A failed payload signature verification now raises a `SignatureVerificationError
|
|||
|
||||
The name Social-Federation was really only an early project name which stuck. Since the beginning, the main module has been `federation`. It makes sense to unify these and also shorter names are generally nicer.
|
||||
|
||||
#### What do you need to do?
|
||||
#### What do you need to do?
|
||||
|
||||
Mostly nothing since the module was already called `federation`. Some things to note below:
|
||||
|
||||
|
@ -490,7 +711,7 @@ Mostly nothing since the module was already called `federation`. Some things to
|
|||
### Changed
|
||||
* Deprecate receiving user `key` attribute for Diaspora protocol. Instead correct attribute is now `private_key` for any user passed to `federation.inbound.handle_receive`. We already use `private_key` in the message creation code so this is just to unify the user related required attributes.
|
||||
* DEPRECATION: There is a fallback with `key` for user objects in the receiving payload part of the Diaspora protocol until 0.8.0.
|
||||
|
||||
|
||||
### Fixes
|
||||
* Loosen up hCard selectors when parsing profile from hCard document in `federation.utils.diaspora.parse_profile_from_hcard`. The selectors now match Diaspora upcoming federation documentation.
|
||||
|
||||
|
@ -499,7 +720,7 @@ Mostly nothing since the module was already called `federation`. Some things to
|
|||
### Breaking changes
|
||||
- `federation.outbound.handle_create_payload` parameter `to_user` is now optional. Public posts don't need a recipient. This also affects Diaspora protocol `build_send` method where the change is reflected similarly. [#43](https://github.com/jaywink/federation/pull/43)
|
||||
- In practise this means the signature has changed for `handle_create_payload` and `build_send` from **`from_user, to_user, entity`** to **`entity, from_user, to_user=None`**.
|
||||
|
||||
|
||||
### Added
|
||||
- `Post.provider_display_name` is now supported in the entity outbound/inbound mappers. [#44](https://github.com/jaywink/federation/pull/44)
|
||||
- Add utility method `federation.utils.network.send_document` which is just a wrapper around `requests.post`. User agent will be added to the headers and exceptions will be silently captured and returned instead. [#45](https://github.com/jaywink/federation/pull/45)
|
||||
|
|
19
README.md
19
README.md
|
@ -1,26 +1,27 @@
|
|||
[![pipeline status](https://git.feneas.org/jaywink/federation/badges/master/pipeline.svg)](https://git.feneas.org/jaywink/federation/commits/master) [![codecov.io](https://codecov.io/github/jaywink/federation/coverage.svg?branch=master)](https://codecov.io/github/jaywink/federation?branch=master) [![issue tracker](https://img.shields.io/badge/issue%20tracker-gitlab-orange.svg)](https://git.feneas.org/jaywink/federation/issues)
|
||||
[![pipeline status](https://gitlab.com/jaywink/federation/badges/master/pipeline.svg)](https://gitlab.com/jaywink/federation) [![issue tracker](https://img.shields.io/badge/issue%20tracker-gitlab-orange.svg)](https://gitlab.com/jaywink/federation/-/issues)
|
||||
|
||||
[![PyPI version](https://badge.fury.io/py/federation.svg)](https://pypi.python.org/pypi/federation) [![Documentation Status](http://readthedocs.org/projects/federation/badge/?version=latest)](http://federation.readthedocs.io/en/latest/?badge=latest) [![PyPI](https://img.shields.io/pypi/pyversions/federation.svg?maxAge=2592000)](https://pypi.python.org/pypi/federation) [![PyPI](https://img.shields.io/pypi/l/federation.svg?maxAge=2592000)](https://pypi.python.org/pypi/federation)
|
||||
|
||||
# federation
|
||||
|
||||
Python library to abstract social web federation protocols like ActivityPub and Diaspora.
|
||||
Python library to abstract social web federation protocols like ActivityPub, Diaspora and Matrix.
|
||||
|
||||
## Introduction
|
||||
|
||||
The aim of `federation` is to provide and abstract multiple social web protocols like
|
||||
ActivityPub and Diaspora in one package, over an easy to use and understand Python API.
|
||||
ActivityPub, Diaspora and Matrix in one package, over an easy to use and understand Python API.
|
||||
This way applications can be built to (almost) transparently support many protocols
|
||||
without the app builder having to know everything about those protocols.
|
||||
|
||||
![](http://federation.readthedocs.io/en/latest/_images/generic_diagram.png)
|
||||
![](./docs/_static/generic_diagram.png)
|
||||
|
||||
## Status
|
||||
|
||||
Currently two protocols are being focused on. Diaspora is considered to be stable with most
|
||||
of the protocol implemented. ActivityPub support should be considered as alpha - all the basic
|
||||
things work but there are likely to be a lot of compatibility issues with other ActivityPub
|
||||
implementations.
|
||||
Currently, three protocols are being focused on.
|
||||
|
||||
* Diaspora is considered to be stable with most of the protocol implemented.
|
||||
* ActivityPub is considered to be stable with working federation with most ActivityPub platforms.
|
||||
* Matrix support is in early phase and not to be considered useful yet.
|
||||
|
||||
The code base is well tested and in use in several projects. Backward incompatible changes
|
||||
will be clearly documented in changelog entries.
|
||||
|
@ -45,4 +46,4 @@ See [development and support documentation](http://federation.readthedocs.io/en/
|
|||
|
||||
### Author
|
||||
|
||||
Jason Robinson / https://jasonrobinson.me / https://git.feneas.org/jaywink / https://github.com/jaywink
|
||||
Jason Robinson / https://jasonrobinson.me / https://gitlab.com/jaywink / https://github.com/jaywink
|
||||
|
|
|
@ -23,7 +23,7 @@ arrow
|
|||
freezegun
|
||||
|
||||
# Django support
|
||||
django>=1.8,<2.3
|
||||
django>=3.2,<4
|
||||
pytest-django
|
||||
|
||||
# Releasing
|
||||
|
|
Plik binarny nie jest wyświetlany.
Plik binarny nie jest wyświetlany.
Przed Szerokość: | Wysokość: | Rozmiar: 31 KiB Po Szerokość: | Wysokość: | Rozmiar: 36 KiB |
|
@ -2,7 +2,7 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from recommonmark.parser import CommonMarkParser
|
||||
|
||||
from federation import __version__
|
||||
__version__ = "0.25.1"
|
||||
|
||||
#
|
||||
# Federation documentation build configuration file, created by
|
||||
|
|
|
@ -3,14 +3,13 @@ Development
|
|||
|
||||
Help is more than welcome to extend this library. Please see the following resources.
|
||||
|
||||
* `Source code repo <https://git.feneas.org/jaywink/federation>`_
|
||||
* `Issue tracker <https://git.feneas.org/jaywink/federation/issues>`_
|
||||
* `Kanban board <https://git.feneas.org/jaywink/federation/boards>`_
|
||||
* `Source code repo <https://gitlab.com/jaywink/federation>`_
|
||||
* `Issue tracker <https://gitlab.com/jaywink/federation/-/issues>`_
|
||||
|
||||
Environment setup
|
||||
-----------------
|
||||
|
||||
Once you have your (Python 3.6+) virtualenv set up, install the development requirements::
|
||||
Once you have your (Python 3.7+) virtualenv set up, install the development requirements::
|
||||
|
||||
pip install -r dev-requirements.txt
|
||||
|
||||
|
@ -34,7 +33,6 @@ Built documentation is available at ``docs/_build/html/index.html``.
|
|||
Contact for help
|
||||
----------------
|
||||
|
||||
Easiest via Matrix on room ``#socialhome:feneas.org``. There is a bridged
|
||||
Freenode channel as well found at ``#socialhome``.
|
||||
Easiest via Matrix on room ``#socialhome:federator.dev``.
|
||||
|
||||
You can also ask questions or give feedback via issues.
|
||||
|
|
|
@ -6,5 +6,5 @@ Projects using federation
|
|||
For examples on how to integrate this library into your project, check these examples:
|
||||
|
||||
* `Socialhome <https://socialhome.network>`_ - a federated home page builder slash personal social network server with high emphasis on card style content visualization.
|
||||
* `Social-Relay <https://git.feneas.org/jaywink/social-relay>`_ - a reference server for the public content relay system that uses the Diaspora protocol.
|
||||
* `Social-Relay <https://github.com/jaywink/social-relay>`_ - a reference server for the public content relay system that uses the Diaspora protocol.
|
||||
* `The Federation info <https://the-federation.info>`_ - statistics and node list for the federated web.
|
||||
|
|
|
@ -11,10 +11,12 @@ without the app builder having to know everything about those protocols.
|
|||
Status
|
||||
------
|
||||
|
||||
Currently two protocols are being focused on. Diaspora is considered to be stable with most
|
||||
of the protocol implemented. ActivityPub support should be considered as alpha - all the basic
|
||||
things work but there are likely to be a lot of compatibility issues with other ActivityPub
|
||||
implementations.
|
||||
Currently three protocols are being focused on.
|
||||
|
||||
* Diaspora is considered to be stable with most of the protocol implemented.
|
||||
* ActivityPub support should be considered as beta - inbound payload are
|
||||
handled by a jsonld processor (calamus)
|
||||
* Matrix support cannot be considered usable as of yet.
|
||||
|
||||
The code base is well tested and in use in several projects. Backward incompatible changes
|
||||
will be clearly documented in changelog entries.
|
||||
|
@ -45,5 +47,5 @@ License
|
|||
Author
|
||||
......
|
||||
|
||||
Jason Robinson / `jasonrobinson.me <https://jasonrobinson.me>`_ / `GitLab <https://git.feneas.org/jaywink>`_ / `GitHub <https://github.com/jaywink>`_
|
||||
Jason Robinson / `jasonrobinson.me <https://jasonrobinson.me>`_ / `@jaywink:federator.dev <https://matrix.to/#/@jaywink:federator.dev>`_ / `GitLab <https://gitlab.com/jaywink>`_ / `GitHub <https://github.com/jaywink>`_
|
||||
|
||||
|
|
|
@ -1,10 +1,12 @@
|
|||
Protocols
|
||||
=========
|
||||
|
||||
Currently two protocols are being focused on. Diaspora is considered to be stable with most
|
||||
of the protocol implemented. ActivityPub support should be considered as alpha - all the basic
|
||||
things work but there are likely to be a lot of compatibility issues with other ActivityPub
|
||||
implementations.
|
||||
Currently three protocols are being focused on.
|
||||
|
||||
* Diaspora is considered to be stable with most of the protocol implemented.
|
||||
* ActivityPub support should be considered as beta - all the basic
|
||||
things work and we are fixing incompatibilities as they are identified.
|
||||
* Matrix support cannot be considered usable as of yet.
|
||||
|
||||
For example implementations in real life projects check :ref:`example-projects`.
|
||||
|
||||
|
@ -45,9 +47,15 @@ Features currently supported:
|
|||
* Actor (Person outbound, Person, Organization, Service inbound)
|
||||
* Note, Article and Page (Create, Delete, Update)
|
||||
* These become a ``Post`` or ``Comment`` depending on ``inReplyTo``.
|
||||
* Attachment images from the above objects
|
||||
* Attachment images, (inbound only for audios and videos) from the above objects
|
||||
* Follow, Accept Follow, Undo Follow
|
||||
* Announce
|
||||
* Inbound Peertube Video objects translated as ``Post``.
|
||||
|
||||
* Inbound processing of reply collections, for platforms that implement it.
|
||||
* Link, Like, View, Signature, PropertyValue, IdentityProof and Emojis objects are only processed for inbound
|
||||
payloads currently. Outbound processing requires support by the client
|
||||
application.
|
||||
|
||||
Namespace
|
||||
.........
|
||||
|
@ -60,28 +68,81 @@ Content media type
|
|||
The following keys will be set on the entity based on the ``source`` property existing:
|
||||
|
||||
* if the object has an ``object.source`` property:
|
||||
* ``_media_type`` will be the source media type
|
||||
* ``_rendered_content`` will be the object ``content``
|
||||
* ``_media_type`` will be the source media type (only text/markdown is supported).
|
||||
* ``rendered_content`` will be the object ``content``
|
||||
* ``raw_content`` will be the source ``content``
|
||||
* if the object has no ``object.source`` property:
|
||||
* ``_media_type`` will be ``text/html``
|
||||
* ``_rendered_content`` will be the object ``content``
|
||||
* ``raw_content`` will object ``content`` run through a HTML2Markdown renderer
|
||||
* ``rendered_content`` will be the object ``content``
|
||||
* ``raw_content`` will be empty
|
||||
|
||||
The ``contentMap`` property is processed but content language selection is not implemented yet.
|
||||
|
||||
For outbound entities, ``raw_content`` is expected to be in ``text/markdown``,
|
||||
specifically CommonMark. When sending payloads, ``raw_content`` will be rendered via
|
||||
the ``commonmark`` library into ``object.content``. The original ``raw_content``
|
||||
will be added to the ``object.source`` property.
|
||||
specifically CommonMark. The client applications are expected to provide the
|
||||
rendered content for protocols that require it (e.g. ActivityPub).
|
||||
When sending payloads, ``object.contentMap`` will be set to ``rendered_content``
|
||||
and ``raw_content`` will be added to the ``object.source`` property.
|
||||
|
||||
Images
|
||||
Medias
|
||||
......
|
||||
|
||||
Any images referenced in the ``raw_content`` of outbound entities will be extracted
|
||||
into ``object.attachment`` objects, for receivers that don't support inline images.
|
||||
These attachments will have a ``pyfed:inlineImage`` property set to ``true`` to
|
||||
indicate the image has been extracted from the content. Receivers should ignore the
|
||||
into ``object.attachment`` object. For receivers that don't support inline images,
|
||||
image attachments will have a ``pyfed:inlineImage`` property set to ``true`` to
|
||||
indicate the image has been extracted from the content. Receivers should ignore the
|
||||
inline image attachments if they support showing ``<img>`` HTML tags or the markdown
|
||||
content in ``object.source``.
|
||||
content in ``object.source``. Outbound audio and video attachments currently lack
|
||||
support from client applications.
|
||||
|
||||
For inbound entities we do this automatically by not including received attachments in
|
||||
the entity ``_children`` attribute.
|
||||
For inbound entities we do this automatically by not including received image attachments in
|
||||
the entity ``_children`` attribute. Audio and video are passed through the client application.
|
||||
|
||||
Hashtags and mentions
|
||||
.....................
|
||||
|
||||
For outbound payloads, client applications must add/set the hashtag/mention value to
|
||||
the ``class`` attribute of rendered content linkified hashtags/mentions. These will be
|
||||
used to help build the corresponding ``Hashtag`` and ``Mention`` objects.
|
||||
|
||||
For inbound payloads, if a markdown source is provided, hashtags/mentions will be extracted
|
||||
through the same method used for Diaspora. If only HTML content is provided, the ``a`` tags
|
||||
will be marked with a ``data-[hashtag|mention]`` attribute (based on the provided Hashtag/Mention
|
||||
objects) to facilitate the ``href`` attribute modifications client applications might
|
||||
wish to make. This should ensure links can be replaced regardless of how the HTML is structured.
|
||||
|
||||
.. _matrix:
|
||||
|
||||
Matrix
|
||||
------
|
||||
|
||||
The aim of Matrix support in this library is not to provide instant messaging but to wrap
|
||||
the parts of the Matrix protocol that specifically are especially useful for social media
|
||||
applications. The current ongoing work on `Ceruelan <https://matrix.org/blog/2020/12/18/introducing-cerulean>`_
|
||||
provides much of what will be implemented in this library.
|
||||
|
||||
This library doesn't aim to be a homeserver or provide any part of the server to server API.
|
||||
The plan is to provide an appservice to hook onto a separate homeserver that deals with all
|
||||
the complex protocol related details. This library will then aim to abstract much of what the
|
||||
appservice gives or takes behind the same API as is provided for the other protocols.
|
||||
|
||||
Currently support is being added, please visit back in future versions.
|
||||
|
||||
NOTE! Current features also assume Django is configured, though this is likely to not be
|
||||
the case in the future.
|
||||
|
||||
Appservice
|
||||
..........
|
||||
|
||||
To generate the appservice registration file you must ensure you've added the relevant
|
||||
configuration (see :ref:`usage-configuration`).
|
||||
|
||||
Then launch a Django shell inside your project and run the following:
|
||||
|
||||
::
|
||||
|
||||
from federation.protocols.matrix.appservice import print_registration_yaml
|
||||
print_registration_yaml()
|
||||
|
||||
This YAML needs to be registered with the linked Matrix homeserver as instructed in the
|
||||
relevant homeserver documentation.
|
||||
|
|
|
@ -37,7 +37,7 @@ passed back to the caller.
|
|||
For sending messages out, either base or protocol specific entities can be passed
|
||||
to the outbound senders.
|
||||
|
||||
If you need the correct protocol specific entity class from the base entity,
|
||||
If you need the correct protocol specific entity class from the base entity,
|
||||
each protocol will define a ``get_outbound_entity`` function.
|
||||
|
||||
.. autofunction:: federation.entities.activitypub.mappers.get_outbound_entity
|
||||
|
@ -149,6 +149,8 @@ Generator classes
|
|||
.. autoclass:: federation.hostmeta.generators.DiasporaHostMeta
|
||||
.. autoclass:: federation.hostmeta.generators.DiasporaWebFinger
|
||||
.. autoclass:: federation.hostmeta.generators.DiasporaHCard
|
||||
.. autoclass:: federation.hostmeta.generators.MatrixClientWellKnown
|
||||
.. autoclass:: federation.hostmeta.generators.MatrixServerWellKnown
|
||||
.. autoclass:: federation.hostmeta.generators.NodeInfo
|
||||
.. autoclass:: federation.hostmeta.generators.RFC7033Webfinger
|
||||
.. autoclass:: federation.hostmeta.generators.SocialRelayWellKnown
|
||||
|
@ -189,8 +191,12 @@ It must be installed separately.
|
|||
|
||||
.. autofunction:: federation.entities.activitypub.django.views.activitypub_object_view
|
||||
.. autofunction:: federation.hostmeta.django.generators.rfc7033_webfinger_view
|
||||
.. autofunction:: federation.hostmeta.django.generators.matrix_client_wellknown_view
|
||||
.. autofunction:: federation.hostmeta.django.generators.matrix_server_wellknown_view
|
||||
.. autofunction:: federation.hostmeta.django.generators.nodeinfo2_view
|
||||
|
||||
.. _usage-configuration:
|
||||
|
||||
Configuration
|
||||
.............
|
||||
|
||||
|
@ -206,9 +212,11 @@ Some settings need to be set in Django settings. An example is below:
|
|||
|
||||
FEDERATION = {
|
||||
"base_url": "https://myserver.domain.tld,
|
||||
"federation_id": "https://example.com/u/john/",
|
||||
"get_object_function": "myproject.utils.get_object",
|
||||
"get_private_key_function": "myproject.utils.get_private_key",
|
||||
"get_profile_function": "myproject.utils.get_profile",
|
||||
"matrix_config_function": "myproject.utils.matrix_config_funct",
|
||||
"nodeinfo2_function": "myproject.utils.get_nodeinfo2_data",
|
||||
"process_payload_function": "myproject.utils.process_payload",
|
||||
"search_path": "/search/?q=",
|
||||
|
@ -216,9 +224,40 @@ Some settings need to be set in Django settings. An example is below:
|
|||
}
|
||||
|
||||
* ``base_url`` is the base URL of the server, ie protocol://domain.tld.
|
||||
* ``federation_id`` is a valid ActivityPub local profile id whose private key will be used to create the HTTP signature for GET requests to ActivityPub platforms.
|
||||
* ``get_object_function`` should be the full path to a function that will return the object matching the ActivityPub ID for the request object passed to this function.
|
||||
* ``get_private_key_function`` should be the full path to a function that will accept a federation ID (url, handle or guid) and return the private key of the user (as an RSA object). Required for example to sign outbound messages in some cases.
|
||||
* ``get_profile_function`` should be the full path to a function that should return a ``Profile`` entity. The function should take one or more keyword arguments: ``fid``, ``handle``, ``guid`` or ``request``. It should look up a profile with one or more of the provided parameters.
|
||||
* ``matrix_config_function`` (optional) function that returns a Matrix configuration dictionary, with the following objects:
|
||||
|
||||
::
|
||||
|
||||
{
|
||||
# Location of the homeserver (not server name)
|
||||
"homeserver_base_url": "https://matrix.domain.tld",
|
||||
# Homeserver domain and port (not server domain)
|
||||
"homeserver_domain_with_port": "matrix.domain.tld:443",
|
||||
# Homeserver name
|
||||
"homeserver_name": "domain.tld",
|
||||
# Appservice details
|
||||
"appservice": {
|
||||
# Unique ID to register with at the homeserver. Don't change this after creating.
|
||||
"id": "uniqueid",
|
||||
# Short code (a-z only), used for various things like namespacing
|
||||
"shortcode": "federatedapp",
|
||||
# Secret token for communication
|
||||
"token": "secret_token",
|
||||
},
|
||||
# (Optional) location of identity server
|
||||
"identity_server_base_url": "https://id.domain.tld",
|
||||
# (Optional) other keys to include in the client well-known (must be a dictionary)
|
||||
"client_wellknown_other_keys": {
|
||||
"org.foo.key" "barfoo",
|
||||
},
|
||||
# (Optional) registration shared secret
|
||||
"registration_shared_secret": "supersecretstring",
|
||||
}
|
||||
|
||||
* ``nodeinfo2_function`` (optional) function that returns data for generating a `NodeInfo2 document <https://github.com/jaywink/nodeinfo2>`_. Once configured the path ``/.well-known/x-nodeinfo2`` will automatically generate a NodeInfo2 document. The function should return a ``dict`` corresponding to the NodeInfo2 schema, with the following minimum items:
|
||||
|
||||
::
|
||||
|
@ -274,6 +313,11 @@ Diaspora
|
|||
.. autofunction:: federation.utils.diaspora.retrieve_diaspora_hcard
|
||||
.. autofunction:: federation.utils.diaspora.retrieve_diaspora_host_meta
|
||||
|
||||
Matrix
|
||||
......
|
||||
|
||||
.. autofunction:: federation.utils.matrix.register_dendrite_user
|
||||
|
||||
Network
|
||||
.......
|
||||
|
||||
|
|
|
@ -1,21 +1,23 @@
|
|||
import importlib
|
||||
from typing import Union, TYPE_CHECKING, Any
|
||||
from types import ModuleType
|
||||
from typing import Union, TYPE_CHECKING
|
||||
|
||||
from federation.exceptions import NoSuitableProtocolFoundError
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from federation.types import RequestType
|
||||
|
||||
__version__ = "0.21.0"
|
||||
__version__ = "0.25.1"
|
||||
|
||||
PROTOCOLS = (
|
||||
"activitypub",
|
||||
"diaspora",
|
||||
"matrix",
|
||||
)
|
||||
|
||||
|
||||
def identify_protocol(method, value):
|
||||
# type: (str, Union[str, RequestType]) -> str
|
||||
# type: (str, Union[str, RequestType]) -> ModuleType
|
||||
"""
|
||||
Loop through protocols, import the protocol module and try to identify the id or request.
|
||||
"""
|
||||
|
@ -27,10 +29,10 @@ def identify_protocol(method, value):
|
|||
raise NoSuitableProtocolFoundError()
|
||||
|
||||
|
||||
def identify_protocol_by_id(id: str):
|
||||
return identify_protocol('id', id)
|
||||
def identify_protocol_by_id(identifier: str) -> ModuleType:
    """Return the protocol module that identifies the given entity/object id."""
    return identify_protocol('id', identifier)
|
||||
|
||||
|
||||
def identify_protocol_by_request(request):
    # type: (RequestType) -> ModuleType
    """Return the protocol module that identifies the given inbound request."""
    return identify_protocol('request', request)
|
||||
|
|
|
@ -1,7 +1,10 @@
|
|||
# noinspection PyPackageRequirements
|
||||
from django.conf.urls import url
|
||||
# noinspection PyPackageRequirements
|
||||
from django.urls import include
|
||||
|
||||
urlpatterns = [
    url(r'', include("federation.hostmeta.django.urls")),
    # Anchored at the start of the path for consistency with ^matrix/ below.
    # Django resolves url() patterns with re.search, so an unanchored r'ap/'
    # would also match "ap/" appearing anywhere inside a longer path.
    url(r'^ap/', include("federation.entities.activitypub.django.urls")),
    url(r'^matrix/', include("federation.entities.matrix.django.urls")),
]
||||
|
|
|
@ -0,0 +1,37 @@
|
|||
import json
|
||||
from datetime import timedelta
|
||||
from pyld import jsonld
|
||||
|
||||
# Cache fetched JSON-LD documents in Redis when Django/Redis are configured;
# otherwise fall back to a plain in-process dict (no expiration).
try:
    from federation.utils.django import get_redis
    cache = get_redis() or {}
    # Redis-backed entries expire after four weeks.
    EXPIRATION = int(timedelta(weeks=4).total_seconds())
except Exception:
    # The previous bare ``except:`` also swallowed SystemExit and
    # KeyboardInterrupt; catching Exception keeps the intended
    # "no Django/Redis available" fallback without hiding those.
    cache = {}
|
||||
|
||||
|
||||
# This is required to work around a bug in pyld that has the Accept header
# accept other content types. From what I understand, precedence handling
# is broken
# from https://github.com/digitalbazaar/pyld/issues/133
# caching loosely inspired by https://github.com/digitalbazaar/pyld/issues/70
def get_loader(*args, **kwargs):
    """
    Build a pyld document loader that caches fetched JSON-LD documents.

    Documents are cached in the module-level ``cache`` (Redis with expiration
    when available, otherwise an in-process dict). Positional and keyword
    arguments are passed through to ``jsonld.requests_document_loader``.
    """
    requests_loader = jsonld.requests_document_loader(*args, **kwargs)

    def loader(url, options=None):
        # The previous signature used a mutable default (``options={}``)
        # which is shared across calls and was mutated below; use None.
        if options is None:
            options = {}
        key = f'ld_cache:{url}'
        try:
            return json.loads(cache[key])
        except KeyError:
            # Cache miss: fetch over the network, forcing a JSON-LD Accept
            # header (see the pyld issue referenced above). setdefault keeps
            # this safe when the caller did not supply a headers dict.
            options.setdefault('headers', {})['Accept'] = 'application/ld+json'
            doc = requests_loader(url, options)
            if isinstance(cache, dict):
                cache[key] = json.dumps(doc)
            else:
                # Redis-backed cache: expire after EXPIRATION seconds.
                cache.set(key, json.dumps(doc), ex=EXPIRATION)
            return doc

    return loader


jsonld.set_document_loader(get_loader())
|
|
@ -1,14 +1,4 @@
|
|||
CONTEXT_ACTIVITYSTREAMS = "https://www.w3.org/ns/activitystreams"
|
||||
CONTEXT_DIASPORA = {"diaspora": "https://diasporafoundation.org/ns/"}
|
||||
CONTEXT_HASHTAG = {"Hashtag": "as:Hashtag"}
|
||||
CONTEXT_LD_SIGNATURES = "https://w3id.org/security/v1"
|
||||
CONTEXT_MANUALLY_APPROVES_FOLLOWERS = {"manuallyApprovesFollowers": "as:manuallyApprovesFollowers"}
|
||||
CONTEXT_PYTHON_FEDERATION = {"pyfed": "https://docs.jasonrobinson.me/ns/python-federation"}
|
||||
CONTEXT_SENSITIVE = {"sensitive": "as:sensitive"}
|
||||
|
||||
CONTEXTS_DEFAULT = [
|
||||
CONTEXT_ACTIVITYSTREAMS,
|
||||
CONTEXT_PYTHON_FEDERATION,
|
||||
]
|
||||
CONTEXT_SECURITY = "https://w3id.org/security/v1"
|
||||
|
||||
NAMESPACE_PUBLIC = "https://www.w3.org/ns/activitystreams#Public"
|
||||
|
|
|
@ -1,8 +1,36 @@
|
|||
from cryptography.exceptions import InvalidSignature
|
||||
from django.http import JsonResponse, HttpResponse, HttpResponseNotFound
|
||||
|
||||
from federation.entities.activitypub.mappers import get_outbound_entity
|
||||
from federation.protocols.activitypub.protocol import Protocol
|
||||
from federation.types import RequestType
|
||||
from federation.utils.django import get_function_from_config
|
||||
|
||||
|
||||
def get_and_verify_signer(request):
|
||||
"""
|
||||
A remote user might be allowed to access retricted content
|
||||
if a valid signature is provided.
|
||||
|
||||
Only done for content.
|
||||
"""
|
||||
# TODO: revisit this when we start responding to sending follow[ing,ers] collections
|
||||
if request.path.startswith('/u/'): return None
|
||||
get_public_key = get_function_from_config('get_public_key_function')
|
||||
if not request.headers.get('Signature'): return None
|
||||
req = RequestType(
|
||||
url=request.build_absolute_uri(),
|
||||
body=request.body,
|
||||
method=request.method,
|
||||
headers=request.headers)
|
||||
protocol = Protocol(request=req, get_contact_key=get_public_key)
|
||||
try:
|
||||
protocol.verify()
|
||||
return protocol.sender
|
||||
except (ValueError, KeyError, InvalidSignature) as exc:
|
||||
return None
|
||||
|
||||
|
||||
def activitypub_object_view(func):
|
||||
"""
|
||||
Generic ActivityPub object view decorator.
|
||||
|
@ -27,11 +55,11 @@ def activitypub_object_view(func):
|
|||
return func(request, *args, **kwargs)
|
||||
|
||||
get_object_function = get_function_from_config('get_object_function')
|
||||
obj = get_object_function(request)
|
||||
obj = get_object_function(request, get_and_verify_signer(request))
|
||||
if not obj:
|
||||
return HttpResponseNotFound()
|
||||
|
||||
as2_obj = obj.as_protocol('activitypub')
|
||||
|
||||
as2_obj = get_outbound_entity(obj, None)
|
||||
return JsonResponse(as2_obj.to_as2(), content_type='application/activity+json')
|
||||
|
||||
def post(request, *args, **kwargs):
|
||||
|
@ -44,7 +72,7 @@ def activitypub_object_view(func):
|
|||
|
||||
if request.method == 'GET':
|
||||
return get(request, *args, **kwargs)
|
||||
elif request.method == 'POST' and request.path.endswith('/inbox/'):
|
||||
elif request.method == 'POST' and request.path.startswith('/u/') and request.path.endswith('/inbox/'):
|
||||
return post(request, *args, **kwargs)
|
||||
|
||||
return HttpResponse(status=405)
|
||||
|
|
|
@ -1,390 +0,0 @@
|
|||
import logging
|
||||
import re
|
||||
import uuid
|
||||
from typing import Dict, List
|
||||
|
||||
import bleach
|
||||
|
||||
from federation.entities.activitypub.constants import (
|
||||
CONTEXTS_DEFAULT, CONTEXT_MANUALLY_APPROVES_FOLLOWERS, CONTEXT_SENSITIVE, CONTEXT_HASHTAG,
|
||||
CONTEXT_LD_SIGNATURES, CONTEXT_DIASPORA)
|
||||
from federation.entities.activitypub.enums import ActorType, ObjectType, ActivityType
|
||||
from federation.entities.base import Profile, Post, Follow, Accept, Comment, Retraction, Share, Image
|
||||
from federation.entities.mixins import RawContentMixin, BaseEntity, PublicMixin
|
||||
from federation.entities.utils import get_base_attributes
|
||||
from federation.outbound import handle_send
|
||||
from federation.types import UserType
|
||||
from federation.utils.django import get_configuration
|
||||
from federation.utils.text import with_slash, validate_handle
|
||||
|
||||
logger = logging.getLogger("federation")
|
||||
|
||||
|
||||
class AttachImagesMixin(RawContentMixin):
|
||||
def pre_send(self) -> None:
|
||||
"""
|
||||
Attach any embedded images from raw_content.
|
||||
"""
|
||||
super().pre_send()
|
||||
if self._media_type != "text/markdown":
|
||||
return
|
||||
regex = r"!\[([\w ]*)\]\((https?://[\w\d\-\./]+\.[\w]*((?<=jpg)|(?<=gif)|(?<=png)|(?<=jpeg)))\)"
|
||||
matches = re.finditer(regex, self.raw_content, re.MULTILINE | re.IGNORECASE)
|
||||
for match in matches:
|
||||
groups = match.groups()
|
||||
self._children.append(
|
||||
ActivitypubImage(
|
||||
url=groups[1],
|
||||
name=groups[0] or "",
|
||||
inline=True,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class ActivitypubEntityMixin(BaseEntity):
|
||||
_type = None
|
||||
|
||||
@classmethod
|
||||
def from_base(cls, entity):
|
||||
# noinspection PyArgumentList
|
||||
return cls(**get_base_attributes(entity))
|
||||
|
||||
def to_string(self):
|
||||
# noinspection PyUnresolvedReferences
|
||||
return str(self.to_as2())
|
||||
|
||||
|
||||
class CleanContentMixin(RawContentMixin):
|
||||
def post_receive(self) -> None:
|
||||
"""
|
||||
Make linkified tags normal tags.
|
||||
"""
|
||||
super().post_receive()
|
||||
|
||||
def remove_tag_links(attrs, new=False):
|
||||
rel = (None, "rel")
|
||||
if attrs.get(rel) == "tag":
|
||||
return
|
||||
return attrs
|
||||
|
||||
self.raw_content = bleach.linkify(
|
||||
self.raw_content,
|
||||
callbacks=[remove_tag_links],
|
||||
parse_email=False,
|
||||
skip_tags=["code", "pre"],
|
||||
)
|
||||
|
||||
|
||||
class ActivitypubAccept(ActivitypubEntityMixin, Accept):
|
||||
_type = ActivityType.ACCEPT.value
|
||||
object: Dict = None
|
||||
|
||||
def to_as2(self) -> Dict:
|
||||
as2 = {
|
||||
"@context": CONTEXTS_DEFAULT,
|
||||
"id": self.activity_id,
|
||||
"type": self._type,
|
||||
"actor": self.actor_id,
|
||||
"object": self.object,
|
||||
}
|
||||
return as2
|
||||
|
||||
|
||||
class ActivitypubNoteMixin(AttachImagesMixin, CleanContentMixin, PublicMixin, ActivitypubEntityMixin):
|
||||
_type = ObjectType.NOTE.value
|
||||
|
||||
def add_object_tags(self) -> List[Dict]:
|
||||
"""
|
||||
Populate tags to the object.tag list.
|
||||
"""
|
||||
tags = []
|
||||
try:
|
||||
config = get_configuration()
|
||||
except ImportError:
|
||||
tags_path = None
|
||||
else:
|
||||
if config["tags_path"]:
|
||||
tags_path = f"{config['base_url']}{config['tags_path']}"
|
||||
else:
|
||||
tags_path = None
|
||||
for tag in self.tags:
|
||||
_tag = {
|
||||
'type': 'Hashtag',
|
||||
'name': f'#{tag}',
|
||||
}
|
||||
if tags_path:
|
||||
_tag["href"] = tags_path.replace(":tag:", tag)
|
||||
tags.append(_tag)
|
||||
return tags
|
||||
|
||||
def extract_mentions(self):
|
||||
"""
|
||||
Extract mentions from the source object.
|
||||
"""
|
||||
super().extract_mentions()
|
||||
if not isinstance(self._source_object, dict):
|
||||
return
|
||||
source = self._source_object.get('object') if isinstance(self._source_object.get('object'), dict) else \
|
||||
self._source_object
|
||||
for tag in source.get('tag', []):
|
||||
if tag.get('type') == "Mention" and tag.get('href'):
|
||||
self._mentions.add(tag.get('href'))
|
||||
|
||||
def pre_send(self):
|
||||
super().pre_send()
|
||||
self.extract_mentions()
|
||||
|
||||
def to_as2(self) -> Dict:
|
||||
as2 = {
|
||||
"@context": CONTEXTS_DEFAULT + [
|
||||
CONTEXT_HASHTAG,
|
||||
CONTEXT_LD_SIGNATURES,
|
||||
CONTEXT_SENSITIVE,
|
||||
],
|
||||
"type": self.activity.value,
|
||||
"id": self.activity_id,
|
||||
"actor": self.actor_id,
|
||||
"object": {
|
||||
"id": self.id,
|
||||
"type": self._type,
|
||||
"attributedTo": self.actor_id,
|
||||
"content": self.rendered_content,
|
||||
"published": self.created_at.isoformat(),
|
||||
"inReplyTo": None,
|
||||
"sensitive": True if "nsfw" in self.tags else False,
|
||||
"summary": None, # TODO Short text? First sentence? First line?
|
||||
"url": self.url,
|
||||
'source': {
|
||||
'content': self.raw_content,
|
||||
'mediaType': self._media_type,
|
||||
},
|
||||
"tag": [],
|
||||
},
|
||||
"published": self.created_at.isoformat(),
|
||||
}
|
||||
|
||||
if len(self._children):
|
||||
as2["object"]["attachment"] = []
|
||||
for child in self._children:
|
||||
as2["object"]["attachment"].append(child.to_as2())
|
||||
|
||||
if len(self._mentions):
|
||||
mentions = list(self._mentions)
|
||||
mentions.sort()
|
||||
for mention in mentions:
|
||||
if mention.startswith("http"):
|
||||
as2["object"]["tag"].append({
|
||||
'type': 'Mention',
|
||||
'href': mention,
|
||||
'name': mention,
|
||||
})
|
||||
elif validate_handle(mention):
|
||||
# Look up via WebFinger
|
||||
as2["object"]["tag"].append({
|
||||
'type': 'Mention',
|
||||
'href': mention, # TODO need to implement fetch via webfinger for AP handles first
|
||||
'name': mention,
|
||||
})
|
||||
|
||||
as2["object"]["tag"].extend(self.add_object_tags())
|
||||
|
||||
if self.guid:
|
||||
as2["@context"].append(CONTEXT_DIASPORA)
|
||||
as2["object"]["diaspora:guid"] = self.guid
|
||||
|
||||
return as2
|
||||
|
||||
|
||||
class ActivitypubComment(ActivitypubNoteMixin, Comment):
|
||||
def to_as2(self) -> Dict:
|
||||
as2 = super().to_as2()
|
||||
as2["object"]["inReplyTo"] = self.target_id
|
||||
return as2
|
||||
|
||||
|
||||
class ActivitypubFollow(ActivitypubEntityMixin, Follow):
|
||||
_type = ActivityType.FOLLOW.value
|
||||
|
||||
def post_receive(self) -> None:
|
||||
"""
|
||||
Post receive hook - send back follow ack.
|
||||
"""
|
||||
super().post_receive()
|
||||
if not self.following:
|
||||
return
|
||||
|
||||
from federation.utils.activitypub import retrieve_and_parse_profile # Circulars
|
||||
try:
|
||||
from federation.utils.django import get_function_from_config
|
||||
except ImportError:
|
||||
logger.warning("ActivitypubFollow.post_receive - Unable to send automatic Accept back, only supported on "
|
||||
"Django currently")
|
||||
return
|
||||
get_private_key_function = get_function_from_config("get_private_key_function")
|
||||
key = get_private_key_function(self.target_id)
|
||||
if not key:
|
||||
logger.warning("ActivitypubFollow.post_receive - Failed to send automatic Accept back: could not find "
|
||||
"profile to sign it with")
|
||||
return
|
||||
accept = ActivitypubAccept(
|
||||
activity_id=f"{self.target_id}#accept-{uuid.uuid4()}",
|
||||
actor_id=self.target_id,
|
||||
target_id=self.activity_id,
|
||||
object=self.to_as2(),
|
||||
)
|
||||
try:
|
||||
profile = retrieve_and_parse_profile(self.actor_id)
|
||||
except Exception:
|
||||
profile = None
|
||||
if not profile:
|
||||
logger.warning("ActivitypubFollow.post_receive - Failed to fetch remote profile for sending back Accept")
|
||||
return
|
||||
try:
|
||||
handle_send(
|
||||
accept,
|
||||
UserType(id=self.target_id, private_key=key),
|
||||
recipients=[{
|
||||
"endpoint": profile.inboxes["private"],
|
||||
"fid": self.actor_id,
|
||||
"protocol": "activitypub",
|
||||
"public": False,
|
||||
}],
|
||||
)
|
||||
except Exception:
|
||||
logger.exception("ActivitypubFollow.post_receive - Failed to send Accept back")
|
||||
|
||||
def to_as2(self) -> Dict:
|
||||
if self.following:
|
||||
as2 = {
|
||||
"@context": CONTEXTS_DEFAULT,
|
||||
"id": self.activity_id,
|
||||
"type": self._type,
|
||||
"actor": self.actor_id,
|
||||
"object": self.target_id,
|
||||
}
|
||||
else:
|
||||
as2 = {
|
||||
"@context": CONTEXTS_DEFAULT,
|
||||
"id": self.activity_id,
|
||||
"type": ActivityType.UNDO.value,
|
||||
"actor": self.actor_id,
|
||||
"object": {
|
||||
"id": f"{self.actor_id}#follow-{uuid.uuid4()}",
|
||||
"type": self._type,
|
||||
"actor": self.actor_id,
|
||||
"object": self.target_id,
|
||||
},
|
||||
}
|
||||
return as2
|
||||
|
||||
|
||||
class ActivitypubImage(ActivitypubEntityMixin, Image):
|
||||
_type = ObjectType.IMAGE.value
|
||||
|
||||
def to_as2(self) -> Dict:
|
||||
return {
|
||||
"type": self._type,
|
||||
"url": self.url,
|
||||
"mediaType": self.media_type,
|
||||
"name": self.name,
|
||||
"pyfed:inlineImage": self.inline,
|
||||
}
|
||||
|
||||
|
||||
class ActivitypubPost(ActivitypubNoteMixin, Post):
|
||||
pass
|
||||
|
||||
|
||||
class ActivitypubProfile(ActivitypubEntityMixin, Profile):
|
||||
_type = ActorType.PERSON.value
|
||||
public = True
|
||||
|
||||
def to_as2(self) -> Dict:
|
||||
as2 = {
|
||||
"@context": CONTEXTS_DEFAULT + [
|
||||
CONTEXT_LD_SIGNATURES,
|
||||
CONTEXT_MANUALLY_APPROVES_FOLLOWERS,
|
||||
],
|
||||
"endpoints": {
|
||||
"sharedInbox": self.inboxes["public"],
|
||||
},
|
||||
"followers": f"{with_slash(self.id)}followers/",
|
||||
"following": f"{with_slash(self.id)}following/",
|
||||
"id": self.id,
|
||||
"inbox": self.inboxes["private"],
|
||||
"manuallyApprovesFollowers": False,
|
||||
"name": self.name,
|
||||
"outbox": f"{with_slash(self.id)}outbox/",
|
||||
"publicKey": {
|
||||
"id": f"{self.id}#main-key",
|
||||
"owner": self.id,
|
||||
"publicKeyPem": self.public_key,
|
||||
},
|
||||
"type": self._type,
|
||||
"url": self.url,
|
||||
}
|
||||
if self.username:
|
||||
as2['preferredUsername'] = self.username
|
||||
if self.raw_content:
|
||||
as2['summary'] = self.raw_content
|
||||
if self.image_urls.get('large'):
|
||||
try:
|
||||
profile_icon = ActivitypubImage(url=self.image_urls.get('large'))
|
||||
if profile_icon.media_type:
|
||||
as2['icon'] = profile_icon.to_as2()
|
||||
except Exception as ex:
|
||||
logger.warning("ActivitypubProfile.to_as2 - failed to set profile icon: %s", ex)
|
||||
|
||||
if self.guid or self.handle:
|
||||
as2["@context"].append(CONTEXT_DIASPORA)
|
||||
if self.guid:
|
||||
as2["diaspora:guid"] = self.guid
|
||||
if self.handle:
|
||||
as2["diaspora:handle"] = self.handle
|
||||
|
||||
return as2
|
||||
|
||||
|
||||
class ActivitypubRetraction(ActivitypubEntityMixin, Retraction):
|
||||
def resolve_object_type(self):
|
||||
return {
|
||||
"Comment": ObjectType.TOMBSTONE.value,
|
||||
"Post": ObjectType.TOMBSTONE.value,
|
||||
"Share": ActivityType.ANNOUNCE.value,
|
||||
}.get(self.entity_type)
|
||||
|
||||
def resolve_type(self):
|
||||
return {
|
||||
"Comment": ActivityType.DELETE.value,
|
||||
"Post": ActivityType.DELETE.value,
|
||||
"Share": ActivityType.UNDO.value,
|
||||
}.get(self.entity_type)
|
||||
|
||||
def to_as2(self) -> Dict:
|
||||
as2 = {
|
||||
"@context": CONTEXTS_DEFAULT,
|
||||
"id": self.activity_id,
|
||||
"type": self.resolve_type(),
|
||||
"actor": self.actor_id,
|
||||
"object": {
|
||||
"id": self.target_id,
|
||||
"type": self.resolve_object_type(),
|
||||
},
|
||||
"published": self.created_at.isoformat(),
|
||||
}
|
||||
return as2
|
||||
|
||||
|
||||
class ActivitypubShare(ActivitypubEntityMixin, Share):
|
||||
_type = ActivityType.ANNOUNCE.value
|
||||
|
||||
def to_as2(self) -> Dict:
|
||||
as2 = {
|
||||
"@context": CONTEXTS_DEFAULT,
|
||||
"id": self.activity_id,
|
||||
"type": self._type,
|
||||
"actor": self.actor_id,
|
||||
"object": self.target_id,
|
||||
"published": self.created_at.isoformat(),
|
||||
}
|
||||
return as2
|
|
@ -0,0 +1,161 @@
|
|||
import copy
|
||||
import json
|
||||
|
||||
from marshmallow import missing
|
||||
from pyld import jsonld
|
||||
|
||||
from federation.entities.activitypub.constants import CONTEXT_ACTIVITYSTREAMS, CONTEXT_SECURITY, NAMESPACE_PUBLIC
|
||||
|
||||
|
||||
# Extract context information from the metadata parameter defined for fields
|
||||
# that are not part of the official AP spec. Use the same extended context for
|
||||
# inbound payload. For outbound payload, build a context with only the required
|
||||
# extensions
|
||||
class LdContextManager:
|
||||
_named = [CONTEXT_ACTIVITYSTREAMS, CONTEXT_SECURITY]
|
||||
_extensions = {}
|
||||
_merged = []
|
||||
_models = []
|
||||
|
||||
def __init__(self, models):
|
||||
self._models = models
|
||||
for klass in models:
|
||||
self._extensions[klass] = {}
|
||||
ctx = getattr(klass, 'ctx', [])
|
||||
if ctx:
|
||||
self._extensions[klass].update({klass.__name__: ctx})
|
||||
for name, value in klass.schema().declared_fields.items():
|
||||
ctx = value.metadata.get('ctx') or []
|
||||
if ctx:
|
||||
self._extensions[klass].update({name: ctx})
|
||||
merged = {}
|
||||
for field in self._extensions.values():
|
||||
for ctx in field.values():
|
||||
self._add_extensions(ctx, self._named, merged)
|
||||
self._merged = copy.copy(self._named)
|
||||
self._merged.append(merged)
|
||||
|
||||
def _add_extensions(self, field, named, extensions):
|
||||
for item in field:
|
||||
if isinstance(item, str) and item not in named:
|
||||
named.append(item)
|
||||
elif isinstance(item, dict):
|
||||
extensions.update(item)
|
||||
|
||||
def _get_fields(self, obj):
|
||||
for klass in self._extensions.keys():
|
||||
if issubclass(type(obj), klass):
|
||||
return self._extensions[klass]
|
||||
return {}
|
||||
|
||||
def compact(self, obj):
|
||||
payload = jsonld.compact(obj.dump(), self.build_context(obj))
|
||||
patched = copy.copy(payload)
|
||||
|
||||
# This is for platforms that don't handle the single element array
|
||||
# compaction to a single value and https://www.w3.org/ns/activitystreams#Public
|
||||
# being compacted to as:Public
|
||||
def patch_payload(payload, patched):
|
||||
for field in ('attachment', 'cc', 'tag', 'to'):
|
||||
value = payload.get(field)
|
||||
if not value:
|
||||
continue
|
||||
if not isinstance(value, list):
|
||||
value = [value]
|
||||
patched[field] = value
|
||||
if field in ('cc', 'to'):
|
||||
try:
|
||||
idx = value.index('as:Public')
|
||||
patched[field][idx] = value[idx].replace('as:Public', NAMESPACE_PUBLIC)
|
||||
except ValueError:
|
||||
pass
|
||||
if isinstance(payload.get('object'), dict):
|
||||
patch_payload(payload['object'], patched['object'])
|
||||
|
||||
patch_payload(payload, patched)
|
||||
return patched
|
||||
|
||||
def build_context(self, obj):
|
||||
from federation.entities.activitypub.models import Object, Link
|
||||
|
||||
final = [CONTEXT_ACTIVITYSTREAMS]
|
||||
extensions = {}
|
||||
|
||||
def walk_object(obj):
|
||||
if type(obj) in self._extensions.keys():
|
||||
self._add_extensions(self._extensions[type(obj)].get(type(obj).__name__, []), final, extensions)
|
||||
to_add = self._get_fields(obj)
|
||||
for field in type(obj).schema().declared_fields.keys():
|
||||
field_value = getattr(obj, field)
|
||||
if field in to_add.keys():
|
||||
if field_value is not missing or obj.signable and field == 'signature':
|
||||
self._add_extensions(to_add[field], final, extensions)
|
||||
if not isinstance(field_value, list):
|
||||
field_value = [field_value]
|
||||
for value in field_value:
|
||||
if issubclass(type(value), (Object, Link)):
|
||||
walk_object(value)
|
||||
|
||||
walk_object(obj)
|
||||
if extensions:
|
||||
final.append(extensions)
|
||||
# compact the array if len == 1 to minimize test changes
|
||||
return final if len(final) > 1 else final[0]
|
||||
|
||||
def merge_context(self, ctx):
|
||||
# One platform sends a single string context
|
||||
if isinstance(ctx, str):
|
||||
ctx = [ctx]
|
||||
|
||||
# add a # at the end of the python-federation string
|
||||
# for legacy socialhome payloads
|
||||
s = json.dumps(ctx)
|
||||
if 'python-federation"' in s:
|
||||
ctx = json.loads(s.replace('python-federation', 'python-federation#', 1))
|
||||
|
||||
# Some platforms have reference invalid json-ld document in @context.
|
||||
# Remove those.
|
||||
for url in ['http://joinmastodon.org/ns', 'http://schema.org']:
|
||||
try:
|
||||
ctx.pop(ctx.index(url))
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
# remove @language in context since this directive is not
|
||||
# processed by calamus. Pleroma adds a useless @language: 'und'
|
||||
# which is discouraged in best practices and in some cases makes
|
||||
# calamus return dict where str is expected.
|
||||
# see https://www.rfc-editor.org/rfc/rfc5646, page 56
|
||||
idx = []
|
||||
for i, v in enumerate(ctx):
|
||||
if isinstance(v, dict):
|
||||
v.pop('@language', None)
|
||||
if len(v) == 0:
|
||||
idx.insert(0, i)
|
||||
for i in idx:
|
||||
ctx.pop(i)
|
||||
|
||||
# Merge all defined AP extensions to the inbound context
|
||||
uris = []
|
||||
defs = {}
|
||||
# Merge original context dicts in one dict, taking into account nested @context
|
||||
def parse_context(ctx):
|
||||
for item in ctx:
|
||||
if isinstance(item, str):
|
||||
uris.append(item)
|
||||
else:
|
||||
if '@context' in item:
|
||||
parse_context([item['@context']])
|
||||
item.pop('@context')
|
||||
defs.update(item)
|
||||
parse_context(ctx)
|
||||
|
||||
for item in self._merged:
|
||||
if isinstance(item, str) and item not in uris:
|
||||
uris.append(item)
|
||||
elif isinstance(item, dict):
|
||||
defs.update(item)
|
||||
|
||||
final = copy.copy(uris)
|
||||
final.append(defs)
|
||||
return final
|
|
@ -0,0 +1,104 @@
|
|||
import datetime
|
||||
import logging
|
||||
import math
|
||||
import re
|
||||
from base64 import b64encode, b64decode
|
||||
from copy import copy
|
||||
from funcy import omit
|
||||
from pyld import jsonld
|
||||
|
||||
from Crypto.Hash import SHA256
|
||||
from Crypto.PublicKey.RSA import import_key
|
||||
from Crypto.Signature import pkcs1_15
|
||||
|
||||
from federation.entities.utils import get_profile
|
||||
from federation.utils.activitypub import retrieve_and_parse_document
|
||||
|
||||
|
||||
logger = logging.getLogger("federation")
|
||||
|
||||
|
||||
def create_ld_signature(obj, author):
|
||||
# Use models.Signature? Maybe overkill...
|
||||
sig = {
|
||||
'created': datetime.datetime.now(tz=datetime.timezone.utc).isoformat(timespec='seconds'),
|
||||
'creator': f'{author.id}#main-key',
|
||||
'@context': 'https://w3id.org/security/v1'
|
||||
}
|
||||
|
||||
try:
|
||||
private_key = import_key(author.private_key)
|
||||
except (ValueError, TypeError) as exc:
|
||||
logger.warning('ld_signature - %s', exc)
|
||||
return None
|
||||
signer = pkcs1_15.new(private_key)
|
||||
|
||||
sig_digest = hash(sig)
|
||||
obj_digest = hash(obj)
|
||||
digest = (sig_digest + obj_digest).encode('utf-8')
|
||||
|
||||
signature = signer.sign(SHA256.new(digest))
|
||||
sig.update({'type': 'RsaSignature2017', 'signatureValue': b64encode(signature).decode()})
|
||||
sig.pop('@context')
|
||||
|
||||
obj.update({'signature': sig})
|
||||
|
||||
|
||||
def verify_ld_signature(payload):
|
||||
"""
|
||||
Verify inbound payload LD signature
|
||||
"""
|
||||
signature = copy(payload.get('signature', None))
|
||||
if not signature:
|
||||
logger.warning('ld_signature - No signature in %s', payload.get("id", "the payload"))
|
||||
return None
|
||||
|
||||
# retrieve the author's public key
|
||||
profile = get_profile(key_id=signature.get('creator'))
|
||||
if not profile:
|
||||
profile = retrieve_and_parse_document(signature.get('creator'))
|
||||
if not profile:
|
||||
logger.warning('ld_signature - Failed to retrieve profile for %s', signature.get("creator"))
|
||||
return None
|
||||
try:
|
||||
pkey = import_key(profile.public_key)
|
||||
except ValueError as exc:
|
||||
logger.warning('ld_signature - %s', exc)
|
||||
return None
|
||||
verifier = pkcs1_15.new(pkey)
|
||||
|
||||
# Compute digests and verify signature
|
||||
sig = omit(signature, ('type', 'signatureValue'))
|
||||
sig.update({'@context': 'https://w3id.org/security/v1'})
|
||||
sig_digest = hash(sig)
|
||||
obj = omit(payload, 'signature')
|
||||
obj_digest = hash(obj)
|
||||
digest = (sig_digest + obj_digest).encode('utf-8')
|
||||
|
||||
try:
|
||||
sig_value = b64decode(signature.get('signatureValue'))
|
||||
verifier.verify(SHA256.new(digest), sig_value)
|
||||
logger.debug('ld_signature - %s has a valid signature', payload.get("id"))
|
||||
return profile.id
|
||||
except ValueError:
|
||||
logger.warning('ld_signature - Invalid signature for %s', payload.get("id"))
|
||||
return None
|
||||
|
||||
|
||||
def hash(obj):
|
||||
nquads = NormalizedDoubles().normalize(obj, options={'format': 'application/nquads', 'algorithm': 'URDNA2015'})
|
||||
return SHA256.new(nquads.encode('utf-8')).hexdigest()
|
||||
|
||||
|
||||
# We need this to ensure the digests are identical.
|
||||
class NormalizedDoubles(jsonld.JsonLdProcessor):
|
||||
def _object_to_rdf(self, item, issuer, triples, rdfDirection):
|
||||
value = item['@value'] if jsonld._is_value(item) else None
|
||||
# The ruby rdf_normalize library turns floats with a zero fraction into integers.
|
||||
if isinstance(value, float) and value == math.floor(value):
|
||||
item['@value'] = math.floor(value)
|
||||
obj = super()._object_to_rdf(item, issuer, triples, rdfDirection)
|
||||
# This is to address https://github.com/digitalbazaar/pyld/issues/175
|
||||
if obj and obj.get('datatype') == jsonld.XSD_DOUBLE:
|
||||
obj['value'] = re.sub(r'(\d)0*E\+?(-)?0*(\d)', r'\1E\2\3', obj['value'])
|
||||
return obj
|
|
@ -1,175 +1,15 @@
|
|||
import logging
|
||||
from typing import List, Callable, Dict, Union, Optional
|
||||
|
||||
from federation.entities.activitypub.constants import NAMESPACE_PUBLIC
|
||||
from federation.entities.activitypub.entities import (
|
||||
ActivitypubFollow, ActivitypubProfile, ActivitypubAccept, ActivitypubPost, ActivitypubComment,
|
||||
ActivitypubRetraction, ActivitypubShare, ActivitypubImage)
|
||||
from federation.entities.base import Follow, Profile, Accept, Post, Comment, Retraction, Share, Image
|
||||
from federation.entities.activitypub.models import element_to_objects
|
||||
from federation.entities.base import Follow, Profile, Accept, Post, Comment, Retraction, Share, Image, Collection
|
||||
from federation.entities.mixins import BaseEntity
|
||||
from federation.types import UserType, ReceiverVariant
|
||||
import federation.entities.activitypub.models as models
|
||||
|
||||
logger = logging.getLogger("federation")
|
||||
|
||||
|
||||
MAPPINGS = {
|
||||
"Accept": ActivitypubAccept,
|
||||
"Announce": ActivitypubShare,
|
||||
"Application": ActivitypubProfile,
|
||||
"Article": ActivitypubPost,
|
||||
"Delete": ActivitypubRetraction,
|
||||
"Follow": ActivitypubFollow, # Technically not correct, but for now we support only following profiles
|
||||
"Group": ActivitypubProfile,
|
||||
"Image": ActivitypubImage,
|
||||
"Note": ActivitypubPost,
|
||||
"Organization": ActivitypubProfile,
|
||||
"Page": ActivitypubPost,
|
||||
"Person": ActivitypubProfile,
|
||||
"Service": ActivitypubProfile,
|
||||
}
|
||||
|
||||
OBJECTS = (
|
||||
"Application",
|
||||
"Article",
|
||||
"Group",
|
||||
"Image",
|
||||
"Note",
|
||||
"Organization",
|
||||
"Page",
|
||||
"Person",
|
||||
"Service",
|
||||
)
|
||||
|
||||
UNDO_MAPPINGS = {
|
||||
"Follow": ActivitypubFollow,
|
||||
"Announce": ActivitypubRetraction,
|
||||
}
|
||||
|
||||
|
||||
def element_to_objects(payload: Dict) -> List:
|
||||
"""
|
||||
Transform an Element to a list of entities.
|
||||
"""
|
||||
cls = None
|
||||
entities = []
|
||||
is_object = True if payload.get('type') in OBJECTS else False
|
||||
if payload.get('type') == "Delete":
|
||||
cls = ActivitypubRetraction
|
||||
elif payload.get('type') == "Undo":
|
||||
if isinstance(payload.get('object'), dict):
|
||||
cls = UNDO_MAPPINGS.get(payload["object"]["type"])
|
||||
elif isinstance(payload.get('object'), dict) and payload["object"].get('type'):
|
||||
if payload["object"]["type"] == "Note" and payload["object"].get("inReplyTo"):
|
||||
cls = ActivitypubComment
|
||||
else:
|
||||
cls = MAPPINGS.get(payload["object"]["type"])
|
||||
else:
|
||||
cls = MAPPINGS.get(payload.get('type'))
|
||||
if not cls:
|
||||
return []
|
||||
|
||||
transformed = transform_attributes(payload, cls, is_object=is_object)
|
||||
entity = cls(**transformed)
|
||||
# Add protocol name
|
||||
entity._source_protocol = "activitypub"
|
||||
# Save element object to entity for possible later use
|
||||
entity._source_object = payload
|
||||
# Extract receivers
|
||||
entity._receivers = extract_receivers(payload)
|
||||
# Extract children
|
||||
if payload.get("object") and isinstance(payload.get("object"), dict):
|
||||
# Try object if exists
|
||||
entity._children = extract_attachments(payload.get("object"))
|
||||
else:
|
||||
# Try payload itself
|
||||
entity._children = extract_attachments(payload)
|
||||
|
||||
if hasattr(entity, "post_receive"):
|
||||
entity.post_receive()
|
||||
|
||||
try:
|
||||
entity.validate()
|
||||
except ValueError as ex:
|
||||
logger.error("Failed to validate entity %s: %s", entity, ex, extra={
|
||||
"transformed": transformed,
|
||||
})
|
||||
return []
|
||||
# Extract mentions
|
||||
if hasattr(entity, "extract_mentions"):
|
||||
entity.extract_mentions()
|
||||
|
||||
entities.append(entity)
|
||||
|
||||
return entities
|
||||
|
||||
|
||||
def extract_attachments(payload: Dict) -> List[Image]:
|
||||
"""
|
||||
Extract images from attachments.
|
||||
|
||||
There could be other attachments, but currently we only extract images.
|
||||
"""
|
||||
attachments = []
|
||||
for item in payload.get('attachment', []):
|
||||
# noinspection PyProtectedMember
|
||||
if item.get("type") in ("Document", "Image") and item.get("mediaType") in Image._valid_media_types:
|
||||
if item.get('pyfed:inlineImage', False):
|
||||
# Skip this image as it's indicated to be inline in content and source already
|
||||
continue
|
||||
attachments.append(
|
||||
ActivitypubImage(
|
||||
url=item.get('url'),
|
||||
name=item.get('name') or "",
|
||||
media_type=item.get("mediaType"),
|
||||
)
|
||||
)
|
||||
return attachments
|
||||
|
||||
|
||||
def extract_receiver(payload: Dict, receiver: str) -> Optional[UserType]:
|
||||
"""
|
||||
Transform a single receiver ID to a UserType.
|
||||
"""
|
||||
actor = payload.get("actor") or payload.get("attributedTo") or ""
|
||||
if receiver == NAMESPACE_PUBLIC:
|
||||
# Ignore since we already store "public" as a boolean on the entity
|
||||
return
|
||||
# Check for this being a list reference to followers of an actor?
|
||||
# TODO: terrible hack! the way some platforms deliver to sharedInbox using just
|
||||
# the followers collection as a target is annoying to us since we would have to
|
||||
# store the followers collection references on application side, which we don't
|
||||
# want to do since it would make application development another step more complex.
|
||||
# So for now we're going to do a terrible assumption that
|
||||
# 1) if "followers" in ID and
|
||||
# 2) if ID starts with actor ID
|
||||
# then; assume this is the followers collection of said actor ID.
|
||||
# When we have a caching system, just fetch each receiver and check what it is.
|
||||
# Without caching this would be too expensive to do.
|
||||
elif receiver.find("followers") > -1 and receiver.startswith(actor):
|
||||
return UserType(id=actor, receiver_variant=ReceiverVariant.FOLLOWERS)
|
||||
# Assume actor ID
|
||||
return UserType(id=receiver, receiver_variant=ReceiverVariant.ACTOR)
|
||||
|
||||
|
||||
def extract_receivers(payload: Dict) -> List[UserType]:
|
||||
"""
|
||||
Exctract receivers from a payload.
|
||||
"""
|
||||
receivers = []
|
||||
for key in ("to", "cc"):
|
||||
receiver = payload.get(key)
|
||||
if isinstance(receiver, list):
|
||||
for item in receiver:
|
||||
extracted = extract_receiver(payload, item)
|
||||
if extracted:
|
||||
receivers.append(extracted)
|
||||
elif isinstance(receiver, str):
|
||||
extracted = extract_receiver(payload, receiver)
|
||||
if extracted:
|
||||
receivers.append(extracted)
|
||||
return receivers
|
||||
|
||||
|
||||
def get_outbound_entity(entity: BaseEntity, private_key):
|
||||
"""Get the correct outbound entity for this protocol.
|
||||
|
||||
|
@ -189,25 +29,36 @@ def get_outbound_entity(entity: BaseEntity, private_key):
|
|||
outbound = None
|
||||
cls = entity.__class__
|
||||
if cls in [
|
||||
ActivitypubAccept, ActivitypubFollow, ActivitypubProfile, ActivitypubPost, ActivitypubComment,
|
||||
ActivitypubRetraction, ActivitypubShare,
|
||||
]:
|
||||
models.Accept, models.Follow, models.Person, models.Note,
|
||||
models.Delete, models.Tombstone, models.Announce, models.Collection,
|
||||
models.OrderedCollection,
|
||||
] and isinstance(entity, BaseEntity):
|
||||
# Already fine
|
||||
outbound = entity
|
||||
elif cls == Accept:
|
||||
outbound = ActivitypubAccept.from_base(entity)
|
||||
outbound = models.Accept.from_base(entity)
|
||||
elif cls == Follow:
|
||||
outbound = ActivitypubFollow.from_base(entity)
|
||||
outbound = models.Follow.from_base(entity)
|
||||
elif cls == Post:
|
||||
outbound = ActivitypubPost.from_base(entity)
|
||||
elif cls == Profile:
|
||||
outbound = ActivitypubProfile.from_base(entity)
|
||||
elif cls == Retraction:
|
||||
outbound = ActivitypubRetraction.from_base(entity)
|
||||
outbound = models.Post.from_base(entity)
|
||||
elif cls == Comment:
|
||||
outbound = ActivitypubComment.from_base(entity)
|
||||
outbound = models.Comment.from_base(entity)
|
||||
elif cls == Profile:
|
||||
outbound = models.Person.from_base(entity)
|
||||
elif cls == Retraction:
|
||||
if entity.entity_type in ('Post', 'Comment'):
|
||||
outbound = models.Tombstone.from_base(entity)
|
||||
outbound.activity = models.Delete
|
||||
elif entity.entity_type == 'Share':
|
||||
outbound = models.Announce.from_base(entity)
|
||||
outbound.activity = models.Undo
|
||||
outbound._required.remove('id')
|
||||
elif entity.entity_type == 'Profile':
|
||||
outbound = models.Delete.from_base(entity)
|
||||
elif cls == Share:
|
||||
outbound = ActivitypubShare.from_base(entity)
|
||||
outbound = models.Announce.from_base(entity)
|
||||
elif cls == Collection:
|
||||
outbound = models.OrderedCollection.from_base(entity) if entity.ordered else models.Collection.from_base(entity)
|
||||
if not outbound:
|
||||
raise ValueError("Don't know how to convert this base entity to ActivityPub protocol entities.")
|
||||
# TODO LDS signing
|
||||
|
@ -233,103 +84,6 @@ def message_to_objects(
|
|||
Takes in a message extracted by a protocol and maps it to entities.
|
||||
"""
|
||||
# We only really expect one element here for ActivityPub.
|
||||
return element_to_objects(message)
|
||||
return element_to_objects(message, sender)
|
||||
|
||||
|
||||
def transform_attribute(
|
||||
key: str, value: Union[str, Dict, int], transformed: Dict, cls, is_object: bool, payload: Dict,
|
||||
) -> None:
|
||||
if value is None:
|
||||
value = ""
|
||||
if key == "id":
|
||||
if is_object:
|
||||
if cls == ActivitypubRetraction:
|
||||
transformed["target_id"] = value
|
||||
transformed["entity_type"] = "Object"
|
||||
else:
|
||||
transformed["id"] = value
|
||||
elif cls in (ActivitypubProfile, ActivitypubShare):
|
||||
transformed["id"] = value
|
||||
else:
|
||||
transformed["activity_id"] = value
|
||||
elif key == "actor":
|
||||
transformed["actor_id"] = value
|
||||
elif key == "attributedTo" and is_object:
|
||||
transformed["actor_id"] = value
|
||||
elif key in ("content", "source"):
|
||||
if payload.get('source') and isinstance(payload.get("source"), dict) and \
|
||||
payload.get('source').get('mediaType') == "text/markdown":
|
||||
transformed["_media_type"] = "text/markdown"
|
||||
transformed["raw_content"] = payload.get('source').get('content').strip()
|
||||
transformed["_rendered_content"] = payload.get('content').strip()
|
||||
else:
|
||||
# Assume HTML by convention
|
||||
transformed["_media_type"] = "text/html"
|
||||
transformed["raw_content"] = payload.get('content').strip()
|
||||
transformed["_rendered_content"] = transformed["raw_content"]
|
||||
elif key == "diaspora:guid":
|
||||
transformed["guid"] = value
|
||||
elif key == "endpoints" and isinstance(value, dict):
|
||||
if "inboxes" not in transformed:
|
||||
transformed["inboxes"] = {"private": None, "public": None}
|
||||
if value.get('sharedInbox'):
|
||||
transformed["inboxes"]["public"] = value.get("sharedInbox")
|
||||
elif key == "icon":
|
||||
# TODO maybe we should ditch these size constants and instead have a more flexible dict for images
|
||||
# so based on protocol there would either be one url or many by size name
|
||||
if isinstance(value, dict):
|
||||
transformed["image_urls"] = {
|
||||
"small": value['url'],
|
||||
"medium": value['url'],
|
||||
"large": value['url'],
|
||||
}
|
||||
else:
|
||||
transformed["image_urls"] = {
|
||||
"small": value,
|
||||
"medium": value,
|
||||
"large": value,
|
||||
}
|
||||
elif key == "inbox":
|
||||
if "inboxes" not in transformed:
|
||||
transformed["inboxes"] = {"private": None, "public": None}
|
||||
transformed["inboxes"]["private"] = value
|
||||
if not transformed["inboxes"]["public"]:
|
||||
transformed["inboxes"]["public"] = value
|
||||
elif key == "inReplyTo":
|
||||
transformed["target_id"] = value
|
||||
elif key == "name":
|
||||
transformed["name"] = value or ""
|
||||
elif key == "object" and not is_object:
|
||||
if isinstance(value, dict):
|
||||
if cls == ActivitypubAccept:
|
||||
transformed["target_id"] = value.get("id")
|
||||
elif cls == ActivitypubFollow:
|
||||
transformed["target_id"] = value.get("object")
|
||||
else:
|
||||
transform_attributes(value, cls, transformed, is_object=True)
|
||||
else:
|
||||
transformed["target_id"] = value
|
||||
elif key == "preferredUsername":
|
||||
transformed["username"] = value
|
||||
elif key == "publicKey":
|
||||
transformed["public_key"] = value.get('publicKeyPem', '')
|
||||
elif key == "summary" and cls == ActivitypubProfile:
|
||||
transformed["raw_content"] = value
|
||||
elif key in ("to", "cc"):
|
||||
if isinstance(value, list) and NAMESPACE_PUBLIC in value:
|
||||
transformed["public"] = True
|
||||
elif value == NAMESPACE_PUBLIC:
|
||||
transformed["public"] = True
|
||||
elif key == "type":
|
||||
if value == "Undo":
|
||||
transformed["following"] = False
|
||||
else:
|
||||
transformed[key] = value
|
||||
|
||||
|
||||
def transform_attributes(payload: Dict, cls, transformed: Dict = None, is_object: bool = False) -> Dict:
|
||||
if not transformed:
|
||||
transformed = {}
|
||||
for key, value in payload.items():
|
||||
transform_attribute(key, value, transformed, cls, is_object, payload)
|
||||
return transformed
|
||||
|
|
Plik diff jest za duży
Load Diff
|
@ -1,4 +1,6 @@
|
|||
from typing import Dict, Tuple
|
||||
from magic import from_file
|
||||
from mimetypes import guess_type
|
||||
|
||||
from dirty_validators.basic import Email
|
||||
|
||||
|
@ -6,7 +8,7 @@ from federation.entities.activitypub.enums import ActivityType
|
|||
from federation.entities.mixins import (
|
||||
PublicMixin, TargetIDMixin, ParticipationMixin, CreatedAtMixin, RawContentMixin, OptionalRawContentMixin,
|
||||
EntityTypeMixin, ProviderDisplayNameMixin, RootTargetIDMixin, BaseEntity)
|
||||
from federation.utils.network import fetch_content_type
|
||||
from federation.utils.network import fetch_content_type, fetch_file
|
||||
|
||||
|
||||
class Accept(CreatedAtMixin, TargetIDMixin, BaseEntity):
|
||||
|
@ -43,12 +45,25 @@ class Image(OptionalRawContentMixin, CreatedAtMixin, BaseEntity):
|
|||
self.media_type = self.get_media_type()
|
||||
|
||||
def get_media_type(self) -> str:
|
||||
media_type = fetch_content_type(self.url)
|
||||
media_type = guess_type(self.url)[0] or fetch_content_type(self.url)
|
||||
if media_type == 'application/octet-stream':
|
||||
try:
|
||||
file = fetch_file(self.url)
|
||||
media_type = from_file(file, mime=True)
|
||||
os.unlink(file)
|
||||
except:
|
||||
pass
|
||||
if media_type in self._valid_media_types:
|
||||
return media_type
|
||||
return ""
|
||||
|
||||
|
||||
class Audio(OptionalRawContentMixin, CreatedAtMixin, BaseEntity):
|
||||
pass
|
||||
|
||||
class Video(OptionalRawContentMixin, CreatedAtMixin, BaseEntity):
|
||||
pass
|
||||
|
||||
class Comment(RawContentMixin, ParticipationMixin, CreatedAtMixin, RootTargetIDMixin, BaseEntity):
|
||||
"""Represents a comment, linked to another object."""
|
||||
participation = "comment"
|
||||
|
@ -176,3 +191,18 @@ class Share(CreatedAtMixin, TargetIDMixin, EntityTypeMixin, OptionalRawContentMi
|
|||
share.
|
||||
"""
|
||||
entity_type = "Post"
|
||||
|
||||
|
||||
class Collection(BaseEntity):
|
||||
"""Represents collections of objects.
|
||||
|
||||
Only useful to Activitypub outbound payloads.
|
||||
"""
|
||||
ordered = False
|
||||
total_items = 0
|
||||
items = []
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self._required.remove('actor_id')
|
||||
self._required += ['ordered']
|
||||
|
|
|
@ -6,8 +6,13 @@ from federation.entities.diaspora.mixins import DiasporaEntityMixin, DiasporaRel
|
|||
from federation.entities.diaspora.utils import format_dt, struct_to_xml
|
||||
from federation.utils.diaspora import get_private_endpoint, get_public_endpoint
|
||||
|
||||
class DiasporaMentionMixin:
|
||||
def pre_send(self):
|
||||
# add curly braces to mentions
|
||||
for mention in self._mentions:
|
||||
self.raw_content = self.raw_content.replace('@'+mention, '@{'+mention+'}')
|
||||
|
||||
class DiasporaComment(DiasporaRelayableMixin, Comment):
|
||||
class DiasporaComment(DiasporaMentionMixin, DiasporaRelayableMixin, Comment):
|
||||
"""Diaspora comment."""
|
||||
_tag_name = "comment"
|
||||
|
||||
|
@ -35,7 +40,7 @@ class DiasporaImage(DiasporaEntityMixin, Image):
|
|||
_tag_name = "photo"
|
||||
|
||||
|
||||
class DiasporaPost(DiasporaEntityMixin, Post):
|
||||
class DiasporaPost(DiasporaMentionMixin, DiasporaEntityMixin, Post):
|
||||
"""Diaspora post, ie status message."""
|
||||
_tag_name = "status_message"
|
||||
|
||||
|
|
|
@ -287,6 +287,8 @@ def get_outbound_entity(entity: BaseEntity, private_key: RsaKey):
|
|||
# in all situations but is apparently being removed.
|
||||
# TODO: remove this once Diaspora removes the extra signature
|
||||
outbound.parent_signature = outbound.signature
|
||||
if hasattr(outbound, "pre_send"):
|
||||
outbound.pre_send()
|
||||
# Validate the entity
|
||||
outbound.validate(direction="outbound")
|
||||
return outbound
|
||||
|
|
|
@ -0,0 +1,14 @@
|
|||
# noinspection PyPackageRequirements
|
||||
from django.conf.urls import url
|
||||
# noinspection PyPackageRequirements
|
||||
from django.views.decorators.csrf import csrf_exempt
|
||||
|
||||
from federation.entities.matrix.django.views import MatrixASTransactionsView
|
||||
|
||||
urlpatterns = [
|
||||
url(
|
||||
regex=r"transactions/(?P<txn_id>[\w-]+)$",
|
||||
view=csrf_exempt(MatrixASTransactionsView.as_view()),
|
||||
name="matrix-as-transactions",
|
||||
),
|
||||
]
|
|
@ -0,0 +1,38 @@
|
|||
import logging
|
||||
|
||||
# noinspection PyPackageRequirements
|
||||
from django.http import JsonResponse
|
||||
# noinspection PyPackageRequirements
|
||||
from django.views import View
|
||||
|
||||
from federation.utils.django import get_function_from_config
|
||||
from federation.utils.matrix import get_matrix_configuration
|
||||
|
||||
logger = logging.getLogger("federation")
|
||||
|
||||
|
||||
class MatrixASBaseView(View):
|
||||
def dispatch(self, request, *args, **kwargs):
|
||||
token = request.GET.get("access_token")
|
||||
if not token:
|
||||
return JsonResponse({"error": "M_FORBIDDEN"}, content_type='application/json', status=403)
|
||||
|
||||
matrix_config = get_matrix_configuration()
|
||||
if token != matrix_config["appservice"]["token"]:
|
||||
return JsonResponse({"error": "M_FORBIDDEN"}, content_type='application/json', status=403)
|
||||
|
||||
return super().dispatch(request, *args, **kwargs)
|
||||
|
||||
|
||||
class MatrixASTransactionsView(MatrixASBaseView):
|
||||
# noinspection PyUnusedLocal,PyMethodMayBeStatic
|
||||
def put(self, request, *args, **kwargs):
|
||||
# Inject the transaction ID to the request as part of the meta items
|
||||
request.META["matrix_transaction_id"] = kwargs.get("txn_id")
|
||||
process_payload_function = get_function_from_config('process_payload_function')
|
||||
result = process_payload_function(request)
|
||||
|
||||
if result:
|
||||
return JsonResponse({}, content_type='application/json', status=200)
|
||||
else:
|
||||
return JsonResponse({"error": "M_UNKNOWN"}, content_type='application/json', status=400)
|
|
@ -0,0 +1,369 @@
|
|||
import json
|
||||
import logging
|
||||
import mimetypes
|
||||
import os
|
||||
from typing import Dict, List, Optional
|
||||
from urllib.parse import quote
|
||||
from uuid import uuid4
|
||||
|
||||
import requests
|
||||
# noinspection PyPackageRequirements
|
||||
from slugify import slugify
|
||||
|
||||
from federation.entities.base import Post, Profile
|
||||
from federation.entities.matrix.enums import EventType
|
||||
from federation.entities.mixins import BaseEntity
|
||||
from federation.entities.utils import get_base_attributes, get_profile
|
||||
from federation.utils.django import get_configuration
|
||||
from federation.utils.matrix import get_matrix_configuration, appservice_auth_header
|
||||
from federation.utils.network import fetch_document, fetch_file
|
||||
|
||||
logger = logging.getLogger("federation")
|
||||
|
||||
|
||||
class MatrixEntityMixin(BaseEntity):
|
||||
_event_type: str = None
|
||||
_payloads: List[Dict] = []
|
||||
_profile_room_id = None
|
||||
_txn_id: str = None
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
# We always require an mxid
|
||||
self._required.append('mxid')
|
||||
# Create a transaction ID
|
||||
self._txn_id = str(uuid4())
|
||||
|
||||
@property
|
||||
def event_type(self) -> str:
|
||||
return self._event_type
|
||||
|
||||
@classmethod
|
||||
def from_base(cls, entity):
|
||||
# type: (BaseEntity) -> MatrixEntityMixin
|
||||
# noinspection PyArgumentList
|
||||
return cls(**get_base_attributes(entity))
|
||||
|
||||
# noinspection PyMethodMayBeStatic
|
||||
def get_endpoint(self) -> str:
|
||||
config = get_matrix_configuration()
|
||||
return f"{config['homeserver_base_url']}/_matrix/client/r0"
|
||||
|
||||
# noinspection PyMethodMayBeStatic
|
||||
def get_endpoint_media(self) -> str:
|
||||
config = get_matrix_configuration()
|
||||
return f"{config['homeserver_base_url']}/_matrix/media/r0"
|
||||
|
||||
def get_profile_room_id(self):
|
||||
# TODO: we should cache these.
|
||||
doc, status, error = fetch_document(
|
||||
url=f"{self.get_endpoint()}/directory/room/{self.profile_room_alias_url_safe}",
|
||||
extra_headers=appservice_auth_header(),
|
||||
)
|
||||
if status == 200:
|
||||
data = json.loads(doc)
|
||||
self._profile_room_id = data["room_id"]
|
||||
|
||||
# noinspection PyMethodMayBeStatic
|
||||
def payloads(self) -> List[Dict]:
|
||||
return self._payloads
|
||||
|
||||
@property
|
||||
def profile_room_alias(self):
|
||||
return f"#{self.mxid}"
|
||||
|
||||
@property
|
||||
def profile_room_alias_url_safe(self):
|
||||
return f"{quote(self.profile_room_alias)}"
|
||||
|
||||
@property
|
||||
def server_name(self) -> str:
|
||||
config = get_matrix_configuration()
|
||||
return config['homeserver_name']
|
||||
|
||||
@property
|
||||
def txn_id(self) -> str:
|
||||
return self._txn_id
|
||||
|
||||
|
||||
class MatrixRoomMessage(Post, MatrixEntityMixin):
|
||||
_event_type = EventType.ROOM_MESSAGE.value
|
||||
_thread_room_event_id: str = None
|
||||
_thread_room_id: str = None
|
||||
|
||||
def add_tag_room_payloads(self, tag_room_id: str):
|
||||
self._payloads.append({
|
||||
"endpoint": f"{super().get_endpoint()}/rooms/{tag_room_id}/join?user_id={self.mxid}",
|
||||
"payload": {},
|
||||
})
|
||||
self._payloads.append({
|
||||
# TODO at some point we'll need to track the event_id's, for now just random
|
||||
# When we start listening to events from the other side, we'll need to filter
|
||||
# the ones we sent. Additionally if there is going to be some kind of symlink MSC,
|
||||
# we're going to want to stop carbon copying to many rooms.
|
||||
"endpoint": f"{super().get_endpoint()}/rooms/{tag_room_id}/send/{self.event_type}/"
|
||||
f"{str(uuid4())}?user_id={self.mxid}",
|
||||
"payload": {
|
||||
"body": self.raw_content,
|
||||
"msgtype": "m.text",
|
||||
"format": "org.matrix.custom.html",
|
||||
"formatted_body": self.rendered_content,
|
||||
},
|
||||
"method": "put",
|
||||
})
|
||||
|
||||
def create_tag_room(self, tag: str) -> str:
|
||||
headers = appservice_auth_header()
|
||||
config = get_configuration()
|
||||
topic = f"Content for the tag #{tag}."
|
||||
if config.get("tags_path"):
|
||||
topic += f" Mirrored from {config['base_url']}{config['tags_path'].replace(':tag:', slugify(tag))}"
|
||||
matrix_config = get_matrix_configuration()
|
||||
response = requests.post(
|
||||
url=f"{super().get_endpoint()}/createRoom",
|
||||
json={
|
||||
"preset": "public_chat",
|
||||
"name": f"#{tag} ({matrix_config['appservice']['shortcode']} | {matrix_config['homeserver_name']})",
|
||||
"room_alias_name": self.get_tag_room_alias_localpart(tag).strip('#'),
|
||||
"topic": topic,
|
||||
},
|
||||
headers=headers,
|
||||
)
|
||||
response.raise_for_status()
|
||||
room_id = response.json()["room_id"]
|
||||
self._payloads.append({
|
||||
"endpoint": f"{super().get_endpoint()}/directory/list/room/{room_id}",
|
||||
"payload": {
|
||||
"visibility": "public",
|
||||
},
|
||||
"method": "put",
|
||||
})
|
||||
return room_id
|
||||
|
||||
def create_thread_room(self):
|
||||
headers = appservice_auth_header()
|
||||
# Create the thread room
|
||||
response = requests.post(
|
||||
url=f"{super().get_endpoint()}/createRoom?user_id={self.mxid}",
|
||||
json={
|
||||
# TODO auto-invite other recipients if private chat
|
||||
"preset": "public_chat" if self.public else "private_chat",
|
||||
"name": f"Thread by {self.mxid}",
|
||||
"topic": self.url,
|
||||
},
|
||||
headers=headers,
|
||||
)
|
||||
response.raise_for_status()
|
||||
self._thread_room_id = response.json()["room_id"]
|
||||
# Send the thread message
|
||||
response = requests.put(
|
||||
url=f"{super().get_endpoint()}/rooms/{self._thread_room_id}/send/{self.event_type}/"
|
||||
f"{str(uuid4())}?user_id={self.mxid}",
|
||||
json={
|
||||
"body": self.raw_content,
|
||||
"msgtype": "m.text",
|
||||
"format": "org.matrix.custom.html",
|
||||
"formatted_body": self.rendered_content,
|
||||
},
|
||||
headers=headers,
|
||||
)
|
||||
response.raise_for_status()
|
||||
self._thread_room_event_id = response.json()["event_id"]
|
||||
|
||||
def get_profile_room_id(self):
|
||||
super().get_profile_room_id()
|
||||
if not self._profile_room_id:
|
||||
from federation.entities.matrix.mappers import get_outbound_entity
|
||||
# Need to also create the profile
|
||||
profile = get_profile(fid=self.actor_id)
|
||||
profile_entity = get_outbound_entity(profile, None)
|
||||
payloads = profile_entity.payloads()
|
||||
if payloads:
|
||||
self._payloads.extend(payloads)
|
||||
|
||||
@staticmethod
|
||||
def get_tag_room_alias_localpart(tag: str) -> str:
|
||||
config = get_matrix_configuration()
|
||||
return f"#_{config['appservice']['shortcode']}_#{slugify(tag)}"
|
||||
|
||||
def get_tag_room_alias_url_safe(self, tag: str) -> str:
|
||||
return quote(f"{self.get_tag_room_alias_localpart(tag)}:{self.server_name}")
|
||||
|
||||
def get_tag_room_id(self, tag: str) -> Optional[str]:
|
||||
# TODO: we should cache these.
|
||||
doc, status, error = fetch_document(
|
||||
url=f"{self.get_endpoint()}/directory/room/{self.get_tag_room_alias_url_safe(tag)}",
|
||||
extra_headers=appservice_auth_header(),
|
||||
)
|
||||
if status == 200:
|
||||
data = json.loads(doc)
|
||||
return data["room_id"]
|
||||
|
||||
def payloads(self) -> List[Dict]:
|
||||
payloads = super().payloads()
|
||||
payloads.append({
|
||||
"endpoint": f"{super().get_endpoint()}/rooms/{self._profile_room_id}/send/{self.event_type}/"
|
||||
f"{self.txn_id}?user_id={self.mxid}",
|
||||
"payload": {
|
||||
"body": self.raw_content,
|
||||
"msgtype": "m.text",
|
||||
"format": "org.matrix.custom.html",
|
||||
"formatted_body": self.rendered_content,
|
||||
# Fields to emulate Cerulean
|
||||
"org.matrix.cerulean.event_id": self._thread_room_event_id,
|
||||
"org.matrix.cerulean.room_id": self._thread_room_id,
|
||||
"org.matrix.cerulean.root": True,
|
||||
},
|
||||
"method": "put",
|
||||
})
|
||||
# Tag the thread room as low priority
|
||||
payloads.append({
|
||||
"endpoint": f"{super().get_endpoint()}/user/{self.mxid}/rooms/{self._thread_room_id}/tags/m.lowpriority"
|
||||
f"?user_id={self.mxid}",
|
||||
"payload": {
|
||||
"order": 0,
|
||||
},
|
||||
"method": "put",
|
||||
})
|
||||
return payloads
|
||||
|
||||
def pre_send(self):
|
||||
"""
|
||||
Do various pre-send things.
|
||||
"""
|
||||
super().pre_send()
|
||||
# Get profile room ID
|
||||
self.get_profile_room_id()
|
||||
# Upload embedded images and replace the HTTP urls in the message with MXC urls so clients show the images
|
||||
self.upload_embedded_images()
|
||||
# Create thread room
|
||||
self.create_thread_room()
|
||||
# Process tags if public post
|
||||
if self.public:
|
||||
for tag in self.tags:
|
||||
tag_room_id = self.get_tag_room_id(tag)
|
||||
if not tag_room_id:
|
||||
# noinspection PyBroadException
|
||||
try:
|
||||
tag_room_id = self.create_tag_room(tag)
|
||||
except Exception as ex:
|
||||
logger.warning("Failed to create tag room for tag %s for post %s: %s", tag, self.id, ex)
|
||||
continue
|
||||
self.add_tag_room_payloads(tag_room_id)
|
||||
|
||||
def upload_embedded_images(self):
|
||||
"""
|
||||
Upload embedded images
|
||||
|
||||
Replaces the HTTP urls in the message with MXC urls so that Matrix clients will show the images.
|
||||
"""
|
||||
for image in self.embedded_images:
|
||||
url, name = image
|
||||
headers = appservice_auth_header()
|
||||
content_type, _encoding = mimetypes.guess_type(url)
|
||||
headers["Content-Type"] = content_type
|
||||
# Random name if none
|
||||
if not name:
|
||||
name = f"{uuid4()}{mimetypes.guess_extension(content_type, strict=False)}"
|
||||
# Need to fetch it locally first
|
||||
# noinspection PyBroadException
|
||||
try:
|
||||
image_file = fetch_file(url=url, timeout=60)
|
||||
except Exception as ex:
|
||||
logger.warning("MatrixRoomMessage.pre_send | Failed to retrieve image %s to be uploaded: %s",
|
||||
url, ex)
|
||||
continue
|
||||
# Then upload
|
||||
headers["Content-Length"] = str(os.stat(image_file).st_size)
|
||||
# noinspection PyBroadException
|
||||
try:
|
||||
with open(image_file, "rb") as f:
|
||||
response = requests.post(
|
||||
f"{super().get_endpoint_media()}/upload?filename={quote(name)}&user_id={self.mxid}",
|
||||
data=f.read(),
|
||||
headers=headers,
|
||||
timeout=60,
|
||||
)
|
||||
response.raise_for_status()
|
||||
except Exception as ex:
|
||||
logger.warning("MatrixRoomMessage.pre_send | Failed to upload image %s: %s",
|
||||
url, ex)
|
||||
continue
|
||||
finally:
|
||||
os.unlink(image_file)
|
||||
# Replace in raw content
|
||||
try:
|
||||
logger.debug("MatrixRoomMessage.pre_send | Got response %s", response.json())
|
||||
content_uri = response.json()["content_uri"]
|
||||
self.raw_content = self.raw_content.replace(url, content_uri)
|
||||
except Exception as ex:
|
||||
logger.error("MatrixRoomMessage.pre_send | Failed to find content_uri from the image upload "
|
||||
"response: %s", ex)
|
||||
|
||||
|
||||
class MatrixProfile(Profile, MatrixEntityMixin):
|
||||
_remote_profile_create_needed = False
|
||||
_remote_room_create_needed = False
|
||||
|
||||
def create_profile_room(self):
|
||||
headers = appservice_auth_header()
|
||||
response = requests.post(
|
||||
url=f"{super().get_endpoint()}/createRoom?user_id={self.mxid}",
|
||||
json={
|
||||
"name": self.name,
|
||||
"preset": "public_chat" if self.public else "private_chat",
|
||||
"room_alias_name": f"@{self.localpart}",
|
||||
"topic": f"Profile room of {self.url}",
|
||||
},
|
||||
headers=headers,
|
||||
)
|
||||
response.raise_for_status()
|
||||
self._profile_room_id = response.json()["room_id"]
|
||||
|
||||
def register_user(self):
|
||||
headers = appservice_auth_header()
|
||||
response = requests.post(
|
||||
url=f"{super().get_endpoint()}/register",
|
||||
json={
|
||||
"username": f"{self.localpart}",
|
||||
"type": "m.login.application_service",
|
||||
},
|
||||
headers=headers,
|
||||
)
|
||||
response.raise_for_status()
|
||||
|
||||
@property
|
||||
def localpart(self) -> str:
|
||||
return self.mxid.replace("@", "").replace(f":{self.server_name}", "")
|
||||
|
||||
def payloads(self) -> List[Dict]:
|
||||
payloads = super().payloads()
|
||||
if self._remote_profile_create_needed:
|
||||
self.register_user()
|
||||
if self._remote_room_create_needed:
|
||||
self.create_profile_room()
|
||||
payloads.append({
|
||||
"endpoint": f"{super().get_endpoint()}/profile/{self.mxid}/displayname?user_id={self.mxid}",
|
||||
"payload": {
|
||||
"displayname": self.name,
|
||||
},
|
||||
"method": "put",
|
||||
})
|
||||
# TODO avatar url in mxc format
|
||||
return payloads
|
||||
|
||||
def pre_send(self):
|
||||
"""
|
||||
Check whether we need to create the user or their profile room.
|
||||
"""
|
||||
doc, status, error = fetch_document(
|
||||
url=f"{super().get_endpoint()}/profile/{self.mxid}",
|
||||
extra_headers=appservice_auth_header(),
|
||||
)
|
||||
if status != 200:
|
||||
self._remote_profile_create_needed = True
|
||||
else:
|
||||
self.get_profile_room_id()
|
||||
|
||||
if self._remote_profile_create_needed or not self._profile_room_id:
|
||||
self._remote_room_create_needed = True
|
|
@ -0,0 +1,11 @@
|
|||
from enum import Enum
|
||||
|
||||
|
||||
class EnumBase(Enum):
|
||||
@classmethod
|
||||
def values(cls):
|
||||
return [value.value for value in cls.__members__.values()]
|
||||
|
||||
|
||||
class EventType(EnumBase):
|
||||
ROOM_MESSAGE = "m.room.message"
|
|
@ -0,0 +1,39 @@
|
|||
import logging
|
||||
|
||||
from federation.entities.base import Profile, Post
|
||||
from federation.entities.matrix.entities import MatrixRoomMessage, MatrixProfile
|
||||
from federation.entities.mixins import BaseEntity
|
||||
|
||||
logger = logging.getLogger("federation")
|
||||
|
||||
|
||||
def get_outbound_entity(entity: BaseEntity, private_key):
|
||||
"""Get the correct outbound entity for this protocol.
|
||||
|
||||
:arg entity: An entity instance which can be of a base or protocol entity class.
|
||||
:arg private_key: Private key of sender in str format
|
||||
:returns: Protocol specific entity class instance.
|
||||
:raises ValueError: If conversion cannot be done.
|
||||
"""
|
||||
if getattr(entity, "outbound_doc", None):
|
||||
# If the entity already has an outbound doc, just return the entity as is
|
||||
return entity
|
||||
outbound = None
|
||||
cls = entity.__class__
|
||||
if cls in [
|
||||
MatrixProfile,
|
||||
MatrixRoomMessage,
|
||||
]:
|
||||
# Already fine
|
||||
outbound = entity
|
||||
elif cls == Post:
|
||||
outbound = MatrixRoomMessage.from_base(entity)
|
||||
elif cls == Profile:
|
||||
outbound = MatrixProfile.from_base(entity)
|
||||
if not outbound:
|
||||
raise ValueError("Don't know how to convert this base entity to Matrix protocol entities.")
|
||||
if hasattr(outbound, "pre_send"):
|
||||
outbound.pre_send()
|
||||
# Validate the entity
|
||||
outbound.validate(direction="outbound")
|
||||
return outbound
|
|
@ -2,13 +2,16 @@ import datetime
|
|||
import importlib
|
||||
import re
|
||||
import warnings
|
||||
from typing import List, Set, Union, Dict
|
||||
from typing import List, Set, Union, Dict, Tuple
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from commonmark import commonmark
|
||||
from markdownify import markdownify
|
||||
from marshmallow import missing
|
||||
|
||||
from federation.entities.activitypub.enums import ActivityType
|
||||
from federation.entities.utils import get_name_for_profile
|
||||
from federation.utils.text import process_text_links, find_tags
|
||||
from federation.entities.utils import get_name_for_profile, get_profile
|
||||
from federation.utils.text import find_elements, find_tags, MENTION_PATTERN
|
||||
|
||||
|
||||
class BaseEntity:
|
||||
|
@ -19,7 +22,9 @@ class BaseEntity:
|
|||
_source_protocol: str = ""
|
||||
# Contains the original object from payload as a string
|
||||
_source_object: Union[str, Dict] = None
|
||||
_sender: str = ""
|
||||
_sender_key: str = ""
|
||||
_tags: Set = None
|
||||
# ActivityType
|
||||
activity: ActivityType = None
|
||||
activity_id: str = ""
|
||||
|
@ -28,30 +33,39 @@ class BaseEntity:
|
|||
base_url: str = ""
|
||||
guid: str = ""
|
||||
handle: str = ""
|
||||
finger: str = ""
|
||||
followers: str = ""
|
||||
id: str = ""
|
||||
key_id: str = ""
|
||||
mxid: str = ""
|
||||
signature: str = ""
|
||||
# for AP
|
||||
to: List = []
|
||||
cc: List = []
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self._required = ["id", "actor_id"]
|
||||
self._children = []
|
||||
self._mentions = set()
|
||||
self._receivers = []
|
||||
for key, value in kwargs.items():
|
||||
if hasattr(self, key):
|
||||
|
||||
# make the assumption that if a schema is being used, the payload
|
||||
# is (de)serialized and validated properly
|
||||
if hasattr(self, 'schema') or kwargs.get('schema'):
|
||||
for key, value in kwargs.items():
|
||||
setattr(self, key, value)
|
||||
else:
|
||||
warnings.warn("%s.__init__ got parameter %s which this class does not support - ignoring." % (
|
||||
self.__class__.__name__, key
|
||||
))
|
||||
else:
|
||||
for key, value in kwargs.items():
|
||||
if hasattr(self, key):
|
||||
setattr(self, key, value)
|
||||
else:
|
||||
warnings.warn("%s.__init__ got parameter %s which this class does not support - ignoring." % (
|
||||
self.__class__.__name__, key
|
||||
))
|
||||
if not self.activity:
|
||||
# Fill a default activity if not given and type of entity class has one
|
||||
self.activity = getattr(self, "_default_activity", None)
|
||||
|
||||
def as_protocol(self, protocol):
|
||||
entities = importlib.import_module(f"federation.entities.{protocol}.entities")
|
||||
klass = getattr(entities, f"{protocol.title()}{self.__class__.__name__}")
|
||||
return klass.from_base(self)
|
||||
|
||||
def post_receive(self):
|
||||
"""
|
||||
Run any actions after deserializing the payload into an entity.
|
||||
|
@ -93,7 +107,7 @@ class BaseEntity:
|
|||
def _validate_required(self, attributes):
|
||||
"""Ensure required attributes are present."""
|
||||
required_fulfilled = set(self._required).issubset(set(attributes))
|
||||
if not required_fulfilled:
|
||||
if not required_fulfilled or required_fulfilled is missing:
|
||||
raise ValueError(
|
||||
"Not all required attributes fulfilled. Required: {required}".format(required=set(self._required))
|
||||
)
|
||||
|
@ -108,7 +122,7 @@ class BaseEntity:
|
|||
attrs_to_check = set(self._required) & set(attributes)
|
||||
for attr in attrs_to_check:
|
||||
value = getattr(self, attr) # We should always have a value here
|
||||
if value is None or value == "":
|
||||
if value is None or value == "" or value is missing:
|
||||
raise ValueError(
|
||||
"Attribute %s cannot be None or an empty string since it is required." % attr
|
||||
)
|
||||
|
@ -182,6 +196,7 @@ class ParticipationMixin(TargetIDMixin):
|
|||
|
||||
class CreatedAtMixin(BaseEntity):
|
||||
created_at = None
|
||||
times: dict = {}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
@ -193,7 +208,7 @@ class CreatedAtMixin(BaseEntity):
|
|||
class RawContentMixin(BaseEntity):
|
||||
_media_type: str = "text/markdown"
|
||||
_mentions: Set = None
|
||||
_rendered_content: str = ""
|
||||
rendered_content: str = ""
|
||||
raw_content: str = ""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
|
@ -202,68 +217,60 @@ class RawContentMixin(BaseEntity):
|
|||
self._required += ["raw_content"]
|
||||
|
||||
@property
|
||||
def rendered_content(self) -> str:
|
||||
"""Returns the rendered version of raw_content, or just raw_content."""
|
||||
from federation.utils.django import get_configuration
|
||||
try:
|
||||
config = get_configuration()
|
||||
if config["tags_path"]:
|
||||
def linkifier(tag: str) -> str:
|
||||
return f'<a href="{config["base_url"]}{config["tags_path"].replace(":tag:", tag.lower())}" ' \
|
||||
f'class="mention hashtag" rel="noopener noreferrer">' \
|
||||
f'#<span>{tag}</span></a>'
|
||||
else:
|
||||
linkifier = None
|
||||
except ImportError:
|
||||
linkifier = None
|
||||
def embedded_images(self) -> List[Tuple[str, str]]:
|
||||
"""
|
||||
Returns a list of images from the raw_content.
|
||||
Currently only markdown supported.
|
||||
|
||||
if self._rendered_content:
|
||||
return self._rendered_content
|
||||
elif self._media_type == "text/markdown" and self.raw_content:
|
||||
# Do tags
|
||||
_tags, rendered = find_tags(self.raw_content, replacer=linkifier)
|
||||
# Render markdown to HTML
|
||||
rendered = commonmark(rendered).strip()
|
||||
# Do mentions
|
||||
if self._mentions:
|
||||
for mention in self._mentions:
|
||||
# Only linkify mentions that are URL's
|
||||
if not mention.startswith("http"):
|
||||
continue
|
||||
display_name = get_name_for_profile(mention)
|
||||
if not display_name:
|
||||
display_name = mention
|
||||
rendered = rendered.replace(
|
||||
"@{%s}" % mention,
|
||||
f'@<a href="{mention}" class="mention"><span>{display_name}</span></a>',
|
||||
)
|
||||
# Finally linkify remaining URL's that are not links
|
||||
rendered = process_text_links(rendered)
|
||||
return rendered
|
||||
return self.raw_content
|
||||
Returns a Tuple of (url, filename).
|
||||
"""
|
||||
images = []
|
||||
if hasattr(self, '_soup'):
|
||||
for img in self._soup.find_all('img', src=re.compile(r'^http')):
|
||||
images.append((img['src'], img.get('title', '') or img.get('alt', '')))
|
||||
else:
|
||||
if self._media_type != "text/markdown" or self.raw_content is None:
|
||||
return images
|
||||
regex = r"!\[([\w\s\-\']*)\]\((https?://[\w\d\-\./]+\.[\w]*((?<=jpg)|(?<=gif)|(?<=png)|(?<=jpeg)))\)"
|
||||
matches = re.finditer(regex, self.raw_content, re.MULTILINE | re.IGNORECASE)
|
||||
for match in matches:
|
||||
groups = match.groups()
|
||||
images.append((groups[1], groups[0] or ""))
|
||||
return images
|
||||
|
||||
# Legacy. Keep this until tests are reworked
|
||||
@property
|
||||
def tags(self) -> List[str]:
|
||||
"""Returns a `list` of unique tags contained in `raw_content`."""
|
||||
if not self.raw_content:
|
||||
return []
|
||||
tags, _text = find_tags(self.raw_content)
|
||||
return sorted(tags)
|
||||
return sorted(find_tags(self.raw_content))
|
||||
|
||||
def extract_mentions(self):
|
||||
matches = re.findall(r'@{([\S ][^{}]+)}', self.raw_content)
|
||||
if not matches:
|
||||
if not self.raw_content:
|
||||
return
|
||||
for mention in matches:
|
||||
mentions = find_elements(
|
||||
BeautifulSoup(
|
||||
commonmark(self.raw_content, ignore_html_blocks=True), 'html.parser'),
|
||||
MENTION_PATTERN)
|
||||
for ns in mentions:
|
||||
mention = ns.text
|
||||
handle = None
|
||||
splits = mention.split(";")
|
||||
if len(splits) == 1:
|
||||
self._mentions.add(splits[0].strip(' }'))
|
||||
handle = splits[0].strip(' }').lstrip('@{')
|
||||
elif len(splits) == 2:
|
||||
self._mentions.add(splits[1].strip(' }'))
|
||||
handle = splits[1].strip(' }')
|
||||
if handle:
|
||||
self._mentions.add(handle)
|
||||
self.raw_content = self.raw_content.replace(mention, '@' + handle)
|
||||
# mardownify the extracted mention in case some characters are escaped in
|
||||
# raw_content
|
||||
self.raw_content = self.raw_content.replace(markdownify(mention), '@' + handle)
|
||||
|
||||
|
||||
class OptionalRawContentMixin(RawContentMixin):
|
||||
"""A version of the RawContentMixin where `raw_content` is not required."""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self._required.remove("raw_content")
|
||||
|
|
|
@ -1,8 +1,11 @@
|
|||
import inspect
|
||||
from typing import Optional
|
||||
from typing import Optional, TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from federation.entities.base import Profile
|
||||
|
||||
|
||||
def get_base_attributes(entity):
|
||||
def get_base_attributes(entity, keep=()):
|
||||
"""Build a dict of attributes of an entity.
|
||||
|
||||
Returns attributes and their values, ignoring any properties, functions and anything that starts
|
||||
|
@ -11,8 +14,10 @@ def get_base_attributes(entity):
|
|||
attributes = {}
|
||||
cls = entity.__class__
|
||||
for attr, _ in inspect.getmembers(cls, lambda o: not isinstance(o, property) and not inspect.isroutine(o)):
|
||||
if not attr.startswith("_"):
|
||||
attributes[attr] = getattr(entity, attr)
|
||||
if not attr.startswith("_") or attr in keep:
|
||||
value = getattr(entity, attr)
|
||||
if value or isinstance(value, bool):
|
||||
attributes[attr] = value
|
||||
return attributes
|
||||
|
||||
|
||||
|
@ -36,3 +41,20 @@ def get_name_for_profile(fid: str) -> Optional[str]:
|
|||
return profile.name
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
def get_profile(**kwargs):
|
||||
# type: (str) -> Profile
|
||||
"""
|
||||
Get a profile via the configured profile getter.
|
||||
|
||||
Currently only works with Django configuration.
|
||||
"""
|
||||
try:
|
||||
from federation.utils.django import get_function_from_config
|
||||
profile_func = get_function_from_config("get_profile_function")
|
||||
if not profile_func:
|
||||
return
|
||||
return profile_func(**kwargs)
|
||||
except Exception:
|
||||
pass
|
||||
|
|
|
@ -13,7 +13,7 @@ logger = logging.getLogger("federation")
|
|||
|
||||
def retrieve_remote_content(
|
||||
id: str, guid: str = None, handle: str = None, entity_type: str = None,
|
||||
sender_key_fetcher: Callable[[str], str] = None,
|
||||
sender_key_fetcher: Callable[[str], str] = None, cache: bool=True,
|
||||
):
|
||||
"""Retrieve remote content and return an Entity object.
|
||||
|
||||
|
@ -28,7 +28,8 @@ def retrieve_remote_content(
|
|||
protocol_name = identify_protocol_by_id(id).PROTOCOL_NAME
|
||||
utils = importlib.import_module("federation.utils.%s" % protocol_name)
|
||||
return utils.retrieve_and_parse_content(
|
||||
id=id, guid=guid, handle=handle, entity_type=entity_type, sender_key_fetcher=sender_key_fetcher,
|
||||
id=id, guid=guid, handle=handle, entity_type=entity_type,
|
||||
cache=cache, sender_key_fetcher=sender_key_fetcher,
|
||||
)
|
||||
|
||||
|
||||
|
|
|
@ -1,8 +1,11 @@
|
|||
import logging
|
||||
|
||||
# noinspection PyPackageRequirements
|
||||
from django.http import HttpResponseBadRequest, JsonResponse, HttpResponseNotFound
|
||||
|
||||
from federation.hostmeta.generators import RFC7033Webfinger, generate_nodeinfo2_document
|
||||
from federation.hostmeta.generators import (
|
||||
RFC7033Webfinger, generate_nodeinfo2_document, MatrixClientWellKnown, MatrixServerWellKnown,
|
||||
)
|
||||
from federation.utils.django import get_configuration, get_function_from_config
|
||||
from federation.utils.text import get_path_from_url
|
||||
|
||||
|
@ -19,6 +22,34 @@ def nodeinfo2_view(request, *args, **kwargs):
|
|||
return JsonResponse(generate_nodeinfo2_document(**nodeinfo2))
|
||||
|
||||
|
||||
def matrix_client_wellknown_view(request, *args, **kwargs):
|
||||
try:
|
||||
matrix_config_func = get_function_from_config("matrix_config_function")
|
||||
except AttributeError:
|
||||
return HttpResponseBadRequest("Not configured")
|
||||
matrix_config = matrix_config_func()
|
||||
|
||||
wellknown = MatrixClientWellKnown(
|
||||
homeserver_base_url=matrix_config["homeserver_base_url"],
|
||||
identity_server_base_url=matrix_config.get("identity_server_base_url"),
|
||||
other_keys=matrix_config.get("client_wellknown_other_keys"),
|
||||
)
|
||||
return JsonResponse(wellknown.render())
|
||||
|
||||
|
||||
def matrix_server_wellknown_view(request, *args, **kwargs):
|
||||
try:
|
||||
matrix_config_func = get_function_from_config("matrix_config_function")
|
||||
except AttributeError:
|
||||
return HttpResponseBadRequest("Not configured")
|
||||
matrix_config = matrix_config_func()
|
||||
|
||||
wellknown = MatrixServerWellKnown(
|
||||
homeserver_domain_with_port=matrix_config["homeserver_domain_with_port"],
|
||||
)
|
||||
return JsonResponse(wellknown.render())
|
||||
|
||||
|
||||
def rfc7033_webfinger_view(request, *args, **kwargs):
|
||||
"""
|
||||
Django view to generate an RFC7033 webfinger.
|
||||
|
@ -29,7 +60,7 @@ def rfc7033_webfinger_view(request, *args, **kwargs):
|
|||
if not resource.startswith("acct:"):
|
||||
return HttpResponseBadRequest("Invalid resource")
|
||||
handle = resource.replace("acct:", "").lower()
|
||||
logger.debug(f"{handle} requested with {request}")
|
||||
logger.debug("%s requested with %s", handle, request)
|
||||
profile_func = get_function_from_config("get_profile_function")
|
||||
|
||||
try:
|
||||
|
|
|
@ -1,9 +1,14 @@
|
|||
# noinspection PyPackageRequirements
|
||||
from django.conf.urls import url
|
||||
|
||||
from federation.hostmeta.django import rfc7033_webfinger_view
|
||||
from federation.hostmeta.django.generators import nodeinfo2_view
|
||||
from federation.hostmeta.django.generators import (
|
||||
nodeinfo2_view, matrix_client_wellknown_view, matrix_server_wellknown_view,
|
||||
)
|
||||
|
||||
urlpatterns = [
|
||||
url(r'^.well-known/matrix/client$', matrix_client_wellknown_view, name="matrix-client-wellknown"),
|
||||
url(r'^.well-known/matrix/server$', matrix_server_wellknown_view, name="matrix-server-wellknown"),
|
||||
url(r'^.well-known/webfinger$', rfc7033_webfinger_view, name="rfc7033-webfinger"),
|
||||
url(r'^.well-known/x-nodeinfo2$', nodeinfo2_view, name="nodeinfo2"),
|
||||
]
|
||||
|
|
|
@ -3,6 +3,7 @@ import os
|
|||
import warnings
|
||||
from base64 import b64encode
|
||||
from string import Template
|
||||
from typing import Dict
|
||||
|
||||
from jsonschema import validate
|
||||
from jsonschema.exceptions import ValidationError
|
||||
|
@ -331,6 +332,43 @@ def get_nodeinfo_well_known_document(url, document_path=None):
|
|||
}
|
||||
|
||||
|
||||
class MatrixClientWellKnown:
|
||||
"""
|
||||
Matrix Client well-known as per https://matrix.org/docs/spec/client_server/r0.6.1#server-discovery
|
||||
"""
|
||||
def __init__(self, homeserver_base_url: str, identity_server_base_url: str = None, other_keys: Dict = None):
|
||||
self.homeserver_base_url = homeserver_base_url
|
||||
self.identity_server_base_url = identity_server_base_url
|
||||
self.other_keys = other_keys
|
||||
|
||||
def render(self):
|
||||
doc = {
|
||||
"m.homeserver": {
|
||||
"base_url": self.homeserver_base_url,
|
||||
}
|
||||
}
|
||||
if self.identity_server_base_url:
|
||||
doc["m.identity_server"] = {
|
||||
"base_url": self.identity_server_base_url,
|
||||
}
|
||||
if self.other_keys:
|
||||
doc.update(self.other_keys)
|
||||
return doc
|
||||
|
||||
|
||||
class MatrixServerWellKnown:
|
||||
"""
|
||||
Matrix Server well-known as per https://matrix.org/docs/spec/server_server/r0.1.4#server-discovery
|
||||
"""
|
||||
def __init__(self, homeserver_domain_with_port: str):
|
||||
self.homeserver_domain_with_port = homeserver_domain_with_port
|
||||
|
||||
def render(self):
|
||||
return {
|
||||
"m.server": self.homeserver_domain_with_port,
|
||||
}
|
||||
|
||||
|
||||
class RFC7033Webfinger:
|
||||
"""
|
||||
RFC 7033 webfinger - see https://tools.ietf.org/html/rfc7033
|
||||
|
|
|
@ -7,6 +7,7 @@ from typing import List, Dict, Union
|
|||
|
||||
# noinspection PyPackageRequirements
|
||||
from Crypto.PublicKey import RSA
|
||||
# noinspection PyPackageRequirements
|
||||
from Crypto.PublicKey.RSA import RsaKey
|
||||
from iteration_utilities import unique_everseen
|
||||
|
||||
|
@ -14,6 +15,7 @@ from federation.entities.activitypub.constants import NAMESPACE_PUBLIC
|
|||
from federation.entities.mixins import BaseEntity
|
||||
from federation.protocols.activitypub.signing import get_http_authentication
|
||||
from federation.types import UserType
|
||||
from federation.utils.matrix import get_matrix_configuration
|
||||
from federation.utils.network import send_document
|
||||
|
||||
logger = logging.getLogger("federation")
|
||||
|
@ -26,7 +28,7 @@ def handle_create_payload(
|
|||
to_user_key: RsaKey = None,
|
||||
parent_user: UserType = None,
|
||||
payload_logger: callable = None,
|
||||
) -> Union[str, dict]:
|
||||
) -> Union[str, Dict, List[Dict]]:
|
||||
"""Create a payload with the given protocol.
|
||||
|
||||
Any given user arguments must have ``private_key`` and ``handle`` attributes.
|
||||
|
@ -40,11 +42,13 @@ def handle_create_payload(
|
|||
be generated. If given, the payload will be sent as this user.
|
||||
:arg payload_logger: (Optional) Function to log the payloads with.
|
||||
|
||||
:returns: Built payload (str or dict)
|
||||
:returns: Built payload(s) (str or dict or list (of payloads))
|
||||
"""
|
||||
mappers = importlib.import_module(f"federation.entities.{protocol_name}.mappers")
|
||||
protocol = importlib.import_module(f"federation.protocols.{protocol_name}.protocol")
|
||||
# noinspection PyUnresolvedReferences
|
||||
protocol = protocol.Protocol()
|
||||
# noinspection PyUnresolvedReferences
|
||||
outbound_entity = mappers.get_outbound_entity(entity, author_user.rsa_private_key)
|
||||
if parent_user:
|
||||
outbound_entity.sign_with_parent(parent_user.rsa_private_key)
|
||||
|
@ -119,21 +123,38 @@ def handle_send(
|
|||
"protocol": "activitypub",
|
||||
"public": False,
|
||||
},
|
||||
{
|
||||
"endpoint": "https://matrix.domain.tld",
|
||||
"fid": "#@user:domain.tld",
|
||||
"protocol": "matrix",
|
||||
"public": True,
|
||||
}
|
||||
]
|
||||
:arg parent_user: (Optional) User object of the parent object, if there is one. This must be given for the
|
||||
Diaspora protocol if a parent object exists, so that a proper ``parent_author_signature`` can
|
||||
be generated. If given, the payload will be sent as this user.
|
||||
be generated. If given, the payload will be sent as this user. For Activitypub, the
|
||||
parent_user's private key will be used to generate the http signature if the author_user
|
||||
is not a local user.
|
||||
|
||||
:arg payload_logger: (Optional) Function to log the payloads with.
|
||||
"""
|
||||
payloads = []
|
||||
ready_payloads = {
|
||||
"activitypub": {
|
||||
"auth": None,
|
||||
"headers": {},
|
||||
"payload": None,
|
||||
"urls": set(),
|
||||
},
|
||||
"diaspora": {
|
||||
"auth": None,
|
||||
"headers": {},
|
||||
"payload": None,
|
||||
"urls": set(),
|
||||
},
|
||||
"matrix": {
|
||||
"auth": None,
|
||||
"headers": {},
|
||||
"payload": None,
|
||||
"urls": set(),
|
||||
},
|
||||
|
@ -141,11 +162,16 @@ def handle_send(
|
|||
skip_ready_payload = {
|
||||
"activitypub": False,
|
||||
"diaspora": False,
|
||||
"matrix": False,
|
||||
}
|
||||
|
||||
logger.debug('handle_send - length of recipients: %s', len(recipients))
|
||||
# Flatten to unique recipients
|
||||
# TODO supply a callable that empties "fid" in the case that public=True
|
||||
unique_recipients = unique_everseen(recipients)
|
||||
unique_recipients = list(unique_everseen(recipients, key=lambda val: val['endpoint']))
|
||||
logger.debug('handle_send - length of unique_recipients: %s', len(unique_recipients))
|
||||
logger.debug('handle_send / unique_recipients - %s', unique_recipients)
|
||||
|
||||
matrix_config = None
|
||||
|
||||
# Generate payloads and collect urls
|
||||
for recipient in unique_recipients:
|
||||
|
@ -160,11 +186,14 @@ def handle_send(
|
|||
|
||||
if protocol == "activitypub":
|
||||
if skip_ready_payload["activitypub"]:
|
||||
logger.debug('Skipping activitypub payload as skip_ready_payload set')
|
||||
continue
|
||||
if entity.__class__.__name__.startswith("Diaspora"):
|
||||
# Don't try to do anything with Diaspora entities currently
|
||||
if entity.__class__.__name__.startswith("Diaspora") or entity.__class__.__name__.startswith("Matrix"):
|
||||
# Don't try to do anything with these entities currently
|
||||
skip_ready_payload["activitypub"] = True
|
||||
logger.debug('Skipping activitypub payload as payload is diaspora or matrix')
|
||||
continue
|
||||
# noinspection PyBroadException
|
||||
try:
|
||||
if not ready_payloads[protocol]["payload"]:
|
||||
try:
|
||||
|
@ -178,20 +207,11 @@ def handle_send(
|
|||
logger.warning("handle_send - skipping activitypub due to failure to generate payload: %s", ex)
|
||||
continue
|
||||
payload = copy.copy(ready_payloads[protocol]["payload"])
|
||||
if public:
|
||||
payload["to"] = [NAMESPACE_PUBLIC]
|
||||
payload["cc"] = [fid]
|
||||
if isinstance(payload.get("object"), dict):
|
||||
payload["object"]["to"] = [NAMESPACE_PUBLIC]
|
||||
payload["object"]["cc"] = [fid]
|
||||
else:
|
||||
payload["to"] = [fid]
|
||||
if isinstance(payload.get("object"), dict):
|
||||
payload["object"]["to"] = [fid]
|
||||
rendered_payload = json.dumps(payload).encode("utf-8")
|
||||
except Exception:
|
||||
logger.error(
|
||||
"handle_send - failed to generate payload for %s, %s: %s", fid, endpoint, traceback.format_exc(),
|
||||
"handle_send - failed to generate activitypub payload for %s, %s: %s",
|
||||
fid, endpoint, traceback.format_exc(),
|
||||
extra={
|
||||
"recipient": recipient,
|
||||
"unique_recipients": list(unique_recipients),
|
||||
|
@ -200,23 +220,29 @@ def handle_send(
|
|||
"ready_payloads": ready_payloads,
|
||||
"entity": entity,
|
||||
"author": author_user.id,
|
||||
"parent_user": parent_user.id,
|
||||
"parent_user": parent_user.id if parent_user else None,
|
||||
}
|
||||
)
|
||||
continue
|
||||
# The parent_user MUST be local
|
||||
local_user = author_user if author_user.rsa_private_key else parent_user
|
||||
payloads.append({
|
||||
"auth": get_http_authentication(author_user.rsa_private_key, f"{author_user.id}#main-key"),
|
||||
"auth": get_http_authentication(local_user.rsa_private_key, f"{local_user.id}#main-key"),
|
||||
"headers": {
|
||||
"Content-Type": 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"',
|
||||
},
|
||||
"payload": rendered_payload,
|
||||
"content_type": 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"',
|
||||
"urls": {endpoint},
|
||||
})
|
||||
elif protocol == "diaspora":
|
||||
if entity.__class__.__name__.startswith("Activitypub"):
|
||||
# Don't try to do anything with Activitypub entities currently
|
||||
if entity.__class__.__name__.startswith("Activitypub") or entity.__class__.__name__.startswith("Matrix"):
|
||||
# Don't try to do anything with these entities currently
|
||||
skip_ready_payload["diaspora"] = True
|
||||
logger.debug('Skipping diaspora payload as payload is activitypub or matrix')
|
||||
continue
|
||||
if public:
|
||||
if skip_ready_payload["diaspora"]:
|
||||
logger.debug('Skipping diaspora payload as skip_ready_payload set')
|
||||
continue
|
||||
if public_key:
|
||||
logger.warning("handle_send - Diaspora recipient cannot be public and use encrypted delivery")
|
||||
|
@ -249,14 +275,76 @@ def handle_send(
|
|||
logger.error("handle_send - failed to generate private payload for %s: %s", endpoint, ex)
|
||||
continue
|
||||
payloads.append({
|
||||
"urls": {endpoint}, "payload": payload, "content_type": "application/json", "auth": None,
|
||||
"auth": None,
|
||||
"headers": {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
"payload": payload,
|
||||
"urls": {endpoint},
|
||||
})
|
||||
elif protocol == "matrix":
|
||||
if skip_ready_payload["matrix"]:
|
||||
logger.debug('Skipping matrix payload as skip_ready_payload set')
|
||||
continue
|
||||
if entity.__class__.__name__.startswith("Activitypub") or entity.__class__.__name__.startswith("Diaspora"):
|
||||
# Don't try to do anything with these entities currently
|
||||
skip_ready_payload["matrix"] = True
|
||||
logger.debug('Skipping matrix payload as payload is activitypub or diaspora')
|
||||
continue
|
||||
payload_info = []
|
||||
# noinspection PyBroadException
|
||||
try:
|
||||
try:
|
||||
# For matrix we actually might get multiple payloads and endpoints
|
||||
payload_info = handle_create_payload(
|
||||
entity, author_user, protocol, parent_user=parent_user, payload_logger=payload_logger,
|
||||
)
|
||||
except ValueError as ex:
|
||||
# No point continuing for this protocol
|
||||
skip_ready_payload["matrix"] = True
|
||||
logger.warning("handle_send - skipping matrix due to failure to generate payload: %s", ex)
|
||||
continue
|
||||
if not matrix_config:
|
||||
matrix_config = get_matrix_configuration()
|
||||
for payload in payload_info:
|
||||
rendered_payload = json.dumps(payload["payload"]).encode("utf-8")
|
||||
payloads.append({
|
||||
"auth": None,
|
||||
"headers": {
|
||||
"Authorization": f"Bearer {matrix_config['appservice']['token']}",
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
"payload": rendered_payload,
|
||||
"urls": {payload["endpoint"]},
|
||||
"method": payload.get("method"),
|
||||
})
|
||||
except Exception:
|
||||
logger.error(
|
||||
"handle_send - failed to generate matrix payload for %s, %s: %s",
|
||||
fid, endpoint, traceback.format_exc(),
|
||||
extra={
|
||||
"recipient": recipient,
|
||||
"unique_recipients": list(unique_recipients),
|
||||
"payload_info": payload_info,
|
||||
"payloads": payloads,
|
||||
"ready_payloads": ready_payloads,
|
||||
"entity": entity,
|
||||
"author": author_user.id,
|
||||
"parent_user": parent_user.id if parent_user else None,
|
||||
}
|
||||
)
|
||||
logger.debug('Continuing from matrix payload after error')
|
||||
continue
|
||||
|
||||
# Add public diaspora payload
|
||||
if ready_payloads["diaspora"]["payload"]:
|
||||
payloads.append({
|
||||
"urls": ready_payloads["diaspora"]["urls"], "payload": ready_payloads["diaspora"]["payload"],
|
||||
"content_type": "application/magic-envelope+xml", "auth": None,
|
||||
"auth": None,
|
||||
"headers": {
|
||||
"Content-Type": "application/magic-envelope+xml",
|
||||
},
|
||||
"payload": ready_payloads["diaspora"]["payload"],
|
||||
"urls": ready_payloads["diaspora"]["urls"],
|
||||
})
|
||||
|
||||
logger.debug("handle_send - %s", payloads)
|
||||
|
@ -265,11 +353,13 @@ def handle_send(
|
|||
for payload in payloads:
|
||||
for url in payload["urls"]:
|
||||
try:
|
||||
# TODO send_document and fetch_document need to handle rate limits
|
||||
send_document(
|
||||
url,
|
||||
payload["payload"],
|
||||
auth=payload["auth"],
|
||||
headers={"Content-Type": payload["content_type"]},
|
||||
auth=payload.get("auth"),
|
||||
headers=payload.get("headers"),
|
||||
method=payload.get("method"),
|
||||
)
|
||||
except Exception as ex:
|
||||
logger.error("handle_send - failed to send payload to %s: %s, payload: %s", url, ex, payload["payload"])
|
||||
|
|
|
@ -3,14 +3,18 @@ import logging
|
|||
import re
|
||||
from typing import Callable, Tuple, Union, Dict
|
||||
|
||||
from cryptography.exceptions import InvalidSignature
|
||||
from Crypto.PublicKey.RSA import RsaKey
|
||||
|
||||
from federation.entities.activitypub.enums import ActorType
|
||||
from federation.entities.mixins import BaseEntity
|
||||
from federation.entities.utils import get_profile
|
||||
from federation.protocols.activitypub.signing import verify_request_signature
|
||||
from federation.types import UserType, RequestType
|
||||
from federation.utils.activitypub import retrieve_and_parse_document
|
||||
from federation.utils.text import decode_if_bytes
|
||||
|
||||
|
||||
logger = logging.getLogger('federation')
|
||||
|
||||
PROTOCOL_NAME = "activitypub"
|
||||
|
@ -42,8 +46,14 @@ class Protocol:
|
|||
get_contact_key = None
|
||||
payload = None
|
||||
request = None
|
||||
sender = None
|
||||
user = None
|
||||
|
||||
def __init__(self, request=None, get_contact_key=None):
|
||||
# this is required for calls to verify on GET requests
|
||||
self.request = request
|
||||
self.get_contact_key = get_contact_key
|
||||
|
||||
def build_send(self, entity: BaseEntity, from_user: UserType, to_user_key: RsaKey = None) -> Union[str, Dict]:
|
||||
"""
|
||||
Build POST data for sending out to remotes.
|
||||
|
@ -57,7 +67,7 @@ class Protocol:
|
|||
# Use pregenerated outbound document
|
||||
rendered = entity.outbound_doc
|
||||
else:
|
||||
rendered = entity.to_as2()
|
||||
rendered = entity.sign_as2(sender=from_user)
|
||||
return rendered
|
||||
|
||||
def extract_actor(self):
|
||||
|
@ -84,9 +94,34 @@ class Protocol:
|
|||
self.extract_actor()
|
||||
# Verify the message is from who it claims to be
|
||||
if not skip_author_verification:
|
||||
self.verify_signature()
|
||||
return self.actor, self.payload
|
||||
try:
|
||||
# Verify the HTTP signature
|
||||
self.verify()
|
||||
except (ValueError, KeyError, InvalidSignature) as exc:
|
||||
logger.warning('HTTP signature verification failed: %s', exc)
|
||||
return self.actor, {}
|
||||
return self.sender, self.payload
|
||||
|
||||
def verify_signature(self):
|
||||
# Verify the HTTP signature
|
||||
verify_request_signature(self.request, self.get_contact_key(self.actor))
|
||||
def verify(self):
|
||||
sig_struct = self.request.headers.get("Signature", None)
|
||||
if not sig_struct:
|
||||
raise ValueError("A signature is required but was not provided")
|
||||
|
||||
# this should return a dict populated with the following keys:
|
||||
# keyId, algorithm, headers and signature
|
||||
sig = {i.split("=", 1)[0]: i.split("=", 1)[1].strip('"') for i in sig_struct.split(",")}
|
||||
|
||||
signer = get_profile(key_id=sig.get('keyId'))
|
||||
if not signer:
|
||||
signer = retrieve_and_parse_document(sig.get('keyId'))
|
||||
self.sender = signer.id if signer else self.actor
|
||||
key = getattr(signer, 'public_key', None)
|
||||
if not key:
|
||||
key = self.get_contact_key(self.actor) if self.get_contact_key and self.actor else ''
|
||||
if key:
|
||||
# fallback to the author's key the client app may have provided
|
||||
logger.warning("Failed to retrieve keyId for %s, trying the actor's key", sig.get('keyId'))
|
||||
else:
|
||||
raise ValueError(f"No public key for {sig.get('keyId')}")
|
||||
|
||||
verify_request_signature(self.request, key=key, algorithm=sig.get('algorithm',""))
|
||||
|
|
|
@ -5,11 +5,13 @@ https://funkwhale.audio/
|
|||
"""
|
||||
import datetime
|
||||
import logging
|
||||
from typing import Union
|
||||
from urllib.parse import urlsplit
|
||||
|
||||
import pytz
|
||||
from Crypto.PublicKey.RSA import RsaKey
|
||||
from requests_http_signature import HTTPSignatureHeaderAuth
|
||||
from httpsig.sign_algorithms import PSS
|
||||
from httpsig.requests_auth import HTTPSignatureAuth
|
||||
from httpsig.verify import HeaderVerifier
|
||||
|
||||
from federation.types import RequestType
|
||||
from federation.utils.network import parse_http_date
|
||||
|
@ -18,27 +20,29 @@ from federation.utils.text import encode_if_text
|
|||
logger = logging.getLogger("federation")
|
||||
|
||||
|
||||
def get_http_authentication(private_key: RsaKey, private_key_id: str) -> HTTPSignatureHeaderAuth:
|
||||
def get_http_authentication(private_key: RsaKey, private_key_id: str, digest: bool=True) -> HTTPSignatureAuth:
|
||||
"""
|
||||
Get HTTP signature authentication for a request.
|
||||
"""
|
||||
key = private_key.exportKey()
|
||||
return HTTPSignatureHeaderAuth(
|
||||
headers=["(request-target)", "user-agent", "host", "date"],
|
||||
headers = ["(request-target)", "user-agent", "host", "date"]
|
||||
if digest: headers.append('digest')
|
||||
return HTTPSignatureAuth(
|
||||
headers=headers,
|
||||
algorithm="rsa-sha256",
|
||||
key=key,
|
||||
secret=key,
|
||||
key_id=private_key_id,
|
||||
)
|
||||
|
||||
|
||||
def verify_request_signature(request: RequestType, public_key: Union[str, bytes]):
|
||||
def verify_request_signature(request: RequestType, key: str="", algorithm: str=""):
|
||||
"""
|
||||
Verify HTTP signature in request against a public key.
|
||||
"""
|
||||
key = encode_if_text(public_key)
|
||||
key = encode_if_text(key)
|
||||
date_header = request.headers.get("Date")
|
||||
if not date_header:
|
||||
raise ValueError("Rquest Date header is missing")
|
||||
raise ValueError("Request Date header is missing")
|
||||
|
||||
ts = parse_http_date(date_header)
|
||||
dt = datetime.datetime.utcfromtimestamp(ts).replace(tzinfo=pytz.utc)
|
||||
|
@ -48,4 +52,8 @@ def verify_request_signature(request: RequestType, public_key: Union[str, bytes]
|
|||
if dt < now - past_delta or dt > now + future_delta:
|
||||
raise ValueError("Request Date is too far in future or past")
|
||||
|
||||
HTTPSignatureHeaderAuth.verify(request, key_resolver=lambda **kwargs: key)
|
||||
path = getattr(request, 'path', urlsplit(request.url).path)
|
||||
if not HeaderVerifier(request.headers, key, method=request.method,
|
||||
path=path, sign_header='signature',
|
||||
sign_algorithm=PSS() if algorithm == 'hs2019' else None).verify():
|
||||
raise ValueError("Invalid signature")
|
||||
|
|
|
@ -0,0 +1,63 @@
|
|||
from typing import Dict
|
||||
|
||||
import yaml
|
||||
|
||||
from federation.utils.django import get_configuration
|
||||
from federation.utils.matrix import get_matrix_configuration
|
||||
|
||||
|
||||
def get_registration_config() -> Dict:
|
||||
"""
|
||||
Get registration config.
|
||||
|
||||
Requires Django support currently.
|
||||
"""
|
||||
config = get_configuration()
|
||||
matrix_config = get_matrix_configuration()
|
||||
|
||||
if not matrix_config.get("appservice"):
|
||||
raise Exception("No appservice configured")
|
||||
|
||||
return {
|
||||
"id": matrix_config["appservice"]["id"],
|
||||
"url": f"{config['base_url']}/matrix",
|
||||
"as_token": matrix_config["appservice"]["token"],
|
||||
"hs_token": matrix_config["appservice"]["token"],
|
||||
"sender_localpart": f'_{matrix_config["appservice"]["shortcode"]}',
|
||||
"namespaces": {
|
||||
# We reserve two namespaces
|
||||
# One is not exclusive, since we're interested in events of "real" users
|
||||
# One is exclusive, the ones that represent "remote to us but managed by us towards Matrix"
|
||||
"users": [
|
||||
{
|
||||
"exclusive": False,
|
||||
"regex": "@.*",
|
||||
},
|
||||
{
|
||||
"exclusive": True,
|
||||
"regex": f"@_{matrix_config['appservice']['shortcode']}_.*"
|
||||
},
|
||||
],
|
||||
"aliases": [
|
||||
{
|
||||
"exclusive": False,
|
||||
"regex": "#.*",
|
||||
},
|
||||
{
|
||||
"exclusive": True,
|
||||
"regex": f"#_{matrix_config['appservice']['shortcode']}_.*"
|
||||
},
|
||||
],
|
||||
"rooms": [],
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def print_registration_yaml():
|
||||
"""
|
||||
Print registration file details.
|
||||
|
||||
Requires Django support currently.
|
||||
"""
|
||||
registration = get_registration_config()
|
||||
print(yaml.safe_dump(registration))
|
|
@ -0,0 +1,72 @@
|
|||
import json
|
||||
import logging
|
||||
import re
|
||||
from typing import Callable, Tuple, List, Dict
|
||||
|
||||
from federation.entities.matrix.entities import MatrixEntityMixin
|
||||
from federation.types import UserType, RequestType
|
||||
from federation.utils.text import decode_if_bytes
|
||||
|
||||
logger = logging.getLogger('federation')
|
||||
|
||||
PROTOCOL_NAME = "activitypub"
|
||||
|
||||
|
||||
def identify_id(identifier: str) -> bool:
|
||||
"""
|
||||
Try to identify whether this is a Matrix identifier.
|
||||
|
||||
TODO fix, not entirely correct..
|
||||
"""
|
||||
return re.match(r'^[@#!].*:.*$', identifier, flags=re.IGNORECASE) is not None
|
||||
|
||||
|
||||
def identify_request(request: RequestType) -> bool:
    """
    Try to identify whether this is a Matrix request
    """
    # Matrix appservice transactions carry an "events" list in the JSON
    # body; anything that fails to parse or lacks that key is not ours.
    # noinspection PyBroadException
    try:
        payload = json.loads(decode_if_bytes(request.body))
        return "events" in payload
    except Exception:
        return False
|
||||
|
||||
|
||||
class Protocol:
    """Matrix protocol stub.

    Holds per-request state while a payload is processed. The inbound
    (receive) path is still TODO; outbound sending delegates entirely to
    the entity's own payload building.
    """

    # Per-request state; populated during receive() handling.
    actor = None
    get_contact_key = None
    payload = None
    request = None
    user = None

    # noinspection PyUnusedLocal
    @staticmethod
    def build_send(entity: MatrixEntityMixin, *args, **kwargs) -> List[Dict]:
        """
        Build POST data for sending out to the homeserver.

        :param entity: The outbound ready entity for this protocol.
        :returns: list of payloads
        """
        return entity.payloads()

    def extract_actor(self):
        # TODO TBD
        pass

    def receive(
            self,
            request: RequestType,
            user: UserType = None,
            sender_key_fetcher: Callable[[str], str] = None,
            skip_author_verification: bool = False) -> Tuple[str, dict]:
        """
        Receive a request.

        Matrix appservices will deliver 1+ events at a time.
        """
        # TODO TBD — currently just echoes whatever state was stored on
        # the instance; no parsing or verification happens yet.
        return self.actor, self.payload
|
|
@ -1,6 +1,8 @@
|
|||
from unittest.mock import Mock
|
||||
from unittest.mock import Mock, DEFAULT
|
||||
|
||||
import pytest
|
||||
import inspect
|
||||
import requests
|
||||
|
||||
# noinspection PyUnresolvedReferences
|
||||
from federation.tests.fixtures.entities import *
|
||||
|
@ -13,7 +15,7 @@ def disable_network_calls(monkeypatch):
|
|||
"""Disable network calls."""
|
||||
monkeypatch.setattr("requests.post", Mock())
|
||||
|
||||
class MockResponse(str):
|
||||
class MockGetResponse(str):
|
||||
status_code = 200
|
||||
text = ""
|
||||
|
||||
|
@ -21,8 +23,23 @@ def disable_network_calls(monkeypatch):
|
|||
def raise_for_status():
|
||||
pass
|
||||
|
||||
monkeypatch.setattr("requests.get", Mock(return_value=MockResponse))
|
||||
saved_get = requests.get
|
||||
def side_effect(*args, **kwargs):
|
||||
if "pyld/documentloader" in inspect.stack()[4][1]:
|
||||
return saved_get(*args, **kwargs)
|
||||
return DEFAULT
|
||||
|
||||
monkeypatch.setattr("requests.get", Mock(return_value=MockGetResponse, side_effect=side_effect))
|
||||
|
||||
class MockHeadResponse(dict):
|
||||
status_code = 200
|
||||
headers = {'Content-Type':'image/jpeg'}
|
||||
|
||||
@staticmethod
|
||||
def raise_for_status():
|
||||
pass
|
||||
|
||||
monkeypatch.setattr("requests.head", Mock(return_value=MockHeadResponse))
|
||||
|
||||
@pytest.fixture
|
||||
def private_key():
|
||||
|
|
|
@ -4,9 +4,12 @@ INSTALLED_APPS = tuple()
|
|||
|
||||
FEDERATION = {
|
||||
"base_url": "https://example.com",
|
||||
"federation_id": "https://example.com/u/john/",
|
||||
"get_object_function": "federation.tests.django.utils.get_object_function",
|
||||
"get_private_key_function": "federation.tests.django.utils.get_private_key",
|
||||
"get_public_key_function": "federation.tests.django.utils.get_public_key",
|
||||
"get_profile_function": "federation.tests.django.utils.get_profile",
|
||||
"matrix_config_function": "federation.tests.django.utils.matrix_config_func",
|
||||
"process_payload_function": "federation.tests.django.utils.process_payload",
|
||||
"search_path": "/search?q=",
|
||||
"tags_path": "/tag/:tag:/",
|
||||
|
|
|
@ -1,7 +1,10 @@
|
|||
from typing import Dict
|
||||
|
||||
# noinspection PyPackageRequirements
|
||||
from Crypto.PublicKey.RSA import RsaKey
|
||||
|
||||
from federation.entities.base import Profile
|
||||
from federation.tests.fixtures.keys import get_dummy_private_key
|
||||
from federation.tests.fixtures.keys import get_dummy_private_key, get_dummy_public_key
|
||||
|
||||
|
||||
def dummy_profile():
|
||||
|
@ -15,7 +18,7 @@ def dummy_profile():
|
|||
)
|
||||
|
||||
|
||||
def get_object_function(object_id, signer=None):
    # Test stub for the FEDERATION "get_object_function" setting: always
    # resolves to the dummy profile regardless of id or signer.
    return dummy_profile()
|
||||
|
||||
|
||||
|
@ -23,9 +26,31 @@ def get_private_key(identifier: str) -> RsaKey:
|
|||
return get_dummy_private_key()
|
||||
|
||||
|
||||
def get_public_key(identifier: str) -> RsaKey:
    """Return the fixed dummy public key used across the test suite."""
    return get_dummy_public_key()
|
||||
|
||||
|
||||
def get_profile(fid=None, handle=None, guid=None, request=None):
    """Return the dummy profile whatever lookup key the caller passed."""
    return dummy_profile()
|
||||
|
||||
|
||||
def matrix_config_func() -> Dict:
    """Return a static Matrix configuration for the test suite.

    Mirrors the structure expected from the ``matrix_config_function``
    setting: homeserver coordinates, appservice registration details,
    identity server URL and extra client well-known keys.
    """
    return {
        "homeserver_base_url": "https://matrix.domain.tld",
        "homeserver_domain_with_port": "matrix.domain.tld:443",
        "homeserver_name": "domain.tld",
        "appservice": {
            "id": "uniqueid",
            "shortcode": "myawesomeapp",
            "token": "secret_token",
        },
        "identity_server_base_url": "https://id.domain.tld",
        "client_wellknown_other_keys": {
            # Fixed: the original read '"org.foo.key" "barfoo",' — a
            # missing colon made it implicit string concatenation inside
            # a *set* literal ({"org.foo.keybarfoo"}), not a key/value
            # mapping as clearly intended.
            "org.foo.key": "barfoo",
        },
        "registration_shared_secret": "supersecretstring",
    }
|
||||
|
||||
|
||||
def process_payload(request):
    """Test stub payload processor: accepts anything, always succeeds."""
    return True
|
||||
|
|
|
@ -38,7 +38,7 @@ class DummyRestrictedView(View):
|
|||
return HttpResponse("foo")
|
||||
|
||||
|
||||
def dummy_get_object_function(request, signer=None):
    """Pretend object lookup for view tests.

    GET requests find nothing (False); any other method succeeds (True).
    """
    return request.method != 'GET'
|
||||
|
@ -59,13 +59,13 @@ class TestActivityPubObjectView:
|
|||
assert response.content == b'foo'
|
||||
|
||||
def test_receives_messages_to_inbox(self):
|
||||
request = RequestFactory().post("/inbox/", data='{"foo": "bar"}', content_type='application/json')
|
||||
request = RequestFactory().post("/u/bla/inbox/", data='{"foo": "bar"}', content_type='application/json')
|
||||
response = dummy_view(request=request)
|
||||
|
||||
assert response.status_code == 202
|
||||
|
||||
def test_receives_messages_to_inbox__cbv(self):
|
||||
request = RequestFactory().post("/inbox/", data='{"foo": "bar"}', content_type="application/json")
|
||||
request = RequestFactory().post("/u/bla/inbox/", data='{"foo": "bar"}', content_type="application/json")
|
||||
view = DummyView.as_view()
|
||||
response = view(request=request)
|
||||
|
||||
|
|
|
@ -1,10 +1,13 @@
|
|||
import commonmark
|
||||
import pytest
|
||||
from unittest.mock import patch
|
||||
from pprint import pprint
|
||||
|
||||
# noinspection PyPackageRequirements
|
||||
from Crypto.PublicKey.RSA import RsaKey
|
||||
|
||||
from federation.entities.activitypub.constants import (
|
||||
CONTEXTS_DEFAULT, CONTEXT_MANUALLY_APPROVES_FOLLOWERS, CONTEXT_LD_SIGNATURES, CONTEXT_DIASPORA)
|
||||
from federation.entities.activitypub.entities import ActivitypubAccept
|
||||
from federation.entities.activitypub.models import context_manager
|
||||
from federation.entities.activitypub.models import Accept
|
||||
from federation.tests.fixtures.keys import PUBKEY
|
||||
from federation.types import UserType
|
||||
|
||||
|
@ -13,12 +16,11 @@ class TestEntitiesConvertToAS2:
|
|||
def test_accept_to_as2(self, activitypubaccept):
|
||||
result = activitypubaccept.to_as2()
|
||||
assert result == {
|
||||
"@context": CONTEXTS_DEFAULT,
|
||||
"@context": context_manager.build_context(activitypubaccept),
|
||||
"id": "https://localhost/accept",
|
||||
"type": "Accept",
|
||||
"actor": "https://localhost/profile",
|
||||
"object": {
|
||||
"@context": CONTEXTS_DEFAULT,
|
||||
"id": "https://localhost/follow",
|
||||
"type": "Follow",
|
||||
"actor": "https://localhost/profile",
|
||||
|
@ -26,10 +28,10 @@ class TestEntitiesConvertToAS2:
|
|||
},
|
||||
}
|
||||
|
||||
def test_accounce_to_as2(self, activitypubannounce):
|
||||
def test_announce_to_as2(self, activitypubannounce):
|
||||
result = activitypubannounce.to_as2()
|
||||
assert result == {
|
||||
"@context": CONTEXTS_DEFAULT,
|
||||
"@context": context_manager.build_context(activitypubannounce),
|
||||
"id": "http://127.0.0.1:8000/post/123456/#create",
|
||||
"type": "Announce",
|
||||
"actor": "http://127.0.0.1:8000/profile/123456/",
|
||||
|
@ -38,29 +40,22 @@ class TestEntitiesConvertToAS2:
|
|||
}
|
||||
|
||||
def test_comment_to_as2(self, activitypubcomment):
|
||||
activitypubcomment.pre_send()
|
||||
result = activitypubcomment.to_as2()
|
||||
assert result == {
|
||||
'@context': [
|
||||
'https://www.w3.org/ns/activitystreams',
|
||||
{"pyfed": "https://docs.jasonrobinson.me/ns/python-federation"},
|
||||
{'Hashtag': 'as:Hashtag'},
|
||||
'https://w3id.org/security/v1',
|
||||
{'sensitive': 'as:sensitive'},
|
||||
],
|
||||
'@context': context_manager.build_context(activitypubcomment),
|
||||
'type': 'Create',
|
||||
'id': 'http://127.0.0.1:8000/post/123456/#create',
|
||||
'actor': 'http://127.0.0.1:8000/profile/123456/',
|
||||
'object': {
|
||||
'id': 'http://127.0.0.1:8000/post/123456/',
|
||||
'type': 'Note',
|
||||
'url': 'http://127.0.0.1:8000/post/123456/',
|
||||
'attributedTo': 'http://127.0.0.1:8000/profile/123456/',
|
||||
'content': '<p>raw_content</p>',
|
||||
'published': '2019-04-27T00:00:00',
|
||||
'inReplyTo': 'http://127.0.0.1:8000/post/012345/',
|
||||
'sensitive': False,
|
||||
'summary': None,
|
||||
'tag': [],
|
||||
'url': '',
|
||||
'source': {
|
||||
'content': 'raw_content',
|
||||
'mediaType': 'text/markdown',
|
||||
|
@ -69,32 +64,29 @@ class TestEntitiesConvertToAS2:
|
|||
'published': '2019-04-27T00:00:00',
|
||||
}
|
||||
|
||||
# Now handled by the client app
|
||||
@pytest.mark.skip
|
||||
def test_comment_to_as2__url_in_raw_content(self, activitypubcomment):
|
||||
activitypubcomment.raw_content = 'raw_content http://example.com'
|
||||
activitypubcomment.rendered_content = process_text_links(
|
||||
commonmark.commonmark(activitypubcomment.raw_content).strip())
|
||||
activitypubcomment.pre_send()
|
||||
result = activitypubcomment.to_as2()
|
||||
assert result == {
|
||||
'@context': [
|
||||
'https://www.w3.org/ns/activitystreams',
|
||||
{"pyfed": "https://docs.jasonrobinson.me/ns/python-federation"},
|
||||
{'Hashtag': 'as:Hashtag'},
|
||||
'https://w3id.org/security/v1',
|
||||
{'sensitive': 'as:sensitive'},
|
||||
],
|
||||
'@context': context_manager.build_context(activitypubcomment),
|
||||
'type': 'Create',
|
||||
'id': 'http://127.0.0.1:8000/post/123456/#create',
|
||||
'actor': 'http://127.0.0.1:8000/profile/123456/',
|
||||
'object': {
|
||||
'id': 'http://127.0.0.1:8000/post/123456/',
|
||||
'type': 'Note',
|
||||
'url': 'http://127.0.0.1:8000/post/123456/',
|
||||
'attributedTo': 'http://127.0.0.1:8000/profile/123456/',
|
||||
'content': '<p>raw_content <a href="http://example.com" rel="nofollow" target="_blank">'
|
||||
'http://example.com</a></p>',
|
||||
'published': '2019-04-27T00:00:00',
|
||||
'inReplyTo': 'http://127.0.0.1:8000/post/012345/',
|
||||
'sensitive': False,
|
||||
'summary': None,
|
||||
'tag': [],
|
||||
'url': '',
|
||||
'source': {
|
||||
'content': 'raw_content http://example.com',
|
||||
'mediaType': 'text/markdown',
|
||||
|
@ -106,7 +98,7 @@ class TestEntitiesConvertToAS2:
|
|||
def test_follow_to_as2(self, activitypubfollow):
|
||||
result = activitypubfollow.to_as2()
|
||||
assert result == {
|
||||
"@context": CONTEXTS_DEFAULT,
|
||||
"@context": context_manager.build_context(activitypubfollow),
|
||||
"id": "https://localhost/follow",
|
||||
"type": "Follow",
|
||||
"actor": "https://localhost/profile",
|
||||
|
@ -115,9 +107,10 @@ class TestEntitiesConvertToAS2:
|
|||
|
||||
def test_follow_to_as2__undo(self, activitypubundofollow):
|
||||
result = activitypubundofollow.to_as2()
|
||||
result["id"] = "https://localhost/undo" # Real object will have a random UUID postfix here
|
||||
result["object"]["id"] = "https://localhost/follow" # Real object will have a random UUID postfix here
|
||||
assert result == {
|
||||
"@context": CONTEXTS_DEFAULT,
|
||||
"@context": context_manager.build_context(activitypubundofollow),
|
||||
"id": "https://localhost/undo",
|
||||
"type": "Undo",
|
||||
"actor": "https://localhost/profile",
|
||||
|
@ -130,29 +123,26 @@ class TestEntitiesConvertToAS2:
|
|||
}
|
||||
|
||||
def test_post_to_as2(self, activitypubpost):
|
||||
activitypubpost.rendered_content = commonmark.commonmark(activitypubpost.raw_content).strip()
|
||||
activitypubpost.pre_send()
|
||||
result = activitypubpost.to_as2()
|
||||
assert result == {
|
||||
'@context': [
|
||||
'https://www.w3.org/ns/activitystreams',
|
||||
{"pyfed": "https://docs.jasonrobinson.me/ns/python-federation"},
|
||||
{'Hashtag': 'as:Hashtag'},
|
||||
'https://w3id.org/security/v1',
|
||||
{'sensitive': 'as:sensitive'},
|
||||
],
|
||||
'@context': context_manager.build_context(activitypubpost),
|
||||
'type': 'Create',
|
||||
'id': 'http://127.0.0.1:8000/post/123456/#create',
|
||||
'actor': 'http://127.0.0.1:8000/profile/123456/',
|
||||
'cc': ['https://http://127.0.0.1:8000/profile/123456/followers/'],
|
||||
'to': ['https://www.w3.org/ns/activitystreams#Public'],
|
||||
'object': {
|
||||
'id': 'http://127.0.0.1:8000/post/123456/',
|
||||
'cc': ['https://http://127.0.0.1:8000/profile/123456/followers/'],
|
||||
'to': ['https://www.w3.org/ns/activitystreams#Public'],
|
||||
'type': 'Note',
|
||||
'url': 'http://127.0.0.1:8000/post/123456/',
|
||||
'attributedTo': 'http://127.0.0.1:8000/profile/123456/',
|
||||
'content': '<h1>raw_content</h1>',
|
||||
'published': '2019-04-27T00:00:00',
|
||||
'inReplyTo': None,
|
||||
'sensitive': False,
|
||||
'summary': None,
|
||||
'tag': [],
|
||||
'url': '',
|
||||
'source': {
|
||||
'content': '# raw_content',
|
||||
'mediaType': 'text/markdown',
|
||||
|
@ -161,37 +151,27 @@ class TestEntitiesConvertToAS2:
|
|||
'published': '2019-04-27T00:00:00',
|
||||
}
|
||||
|
||||
# TODO: fix this test.
|
||||
@pytest.mark.skip
|
||||
def test_post_to_as2__with_mentions(self, activitypubpost_mentions):
|
||||
activitypubpost_mentions.pre_send()
|
||||
result = activitypubpost_mentions.to_as2()
|
||||
assert result == {
|
||||
'@context': [
|
||||
'https://www.w3.org/ns/activitystreams',
|
||||
{"pyfed": "https://docs.jasonrobinson.me/ns/python-federation"},
|
||||
{'Hashtag': 'as:Hashtag'},
|
||||
'https://w3id.org/security/v1',
|
||||
{'sensitive': 'as:sensitive'},
|
||||
],
|
||||
'@context': context_manager.build_context(activitypubpost_mentions),
|
||||
'type': 'Create',
|
||||
'id': 'http://127.0.0.1:8000/post/123456/#create',
|
||||
'actor': 'http://127.0.0.1:8000/profile/123456/',
|
||||
'object': {
|
||||
'id': 'http://127.0.0.1:8000/post/123456/',
|
||||
'type': 'Note',
|
||||
'url': 'http://127.0.0.1:8000/post/123456/',
|
||||
'attributedTo': 'http://127.0.0.1:8000/profile/123456/',
|
||||
'content': '<h1>raw_content</h1>\n<p>@{someone@localhost.local} @<a class="mention" '
|
||||
'href="http://localhost.local/someone" rel="nofollow" target="_blank">'
|
||||
'<span>Bob Bobértson</span></a></p>',
|
||||
'published': '2019-04-27T00:00:00',
|
||||
'inReplyTo': None,
|
||||
'sensitive': False,
|
||||
'summary': None,
|
||||
'tag': [
|
||||
{
|
||||
"type": "Mention",
|
||||
"href": "http://127.0.0.1:8000/profile/999999",
|
||||
"name": "http://127.0.0.1:8000/profile/999999",
|
||||
},
|
||||
{
|
||||
"type": "Mention",
|
||||
"href": "http://localhost.local/someone",
|
||||
|
@ -208,7 +188,6 @@ class TestEntitiesConvertToAS2:
|
|||
"name": "someone@localhost.local",
|
||||
},
|
||||
],
|
||||
'url': '',
|
||||
'source': {
|
||||
'content': '# raw_content\n\n@{someone@localhost.local} @{http://localhost.local/someone}',
|
||||
'mediaType': 'text/markdown',
|
||||
|
@ -218,35 +197,38 @@ class TestEntitiesConvertToAS2:
|
|||
}
|
||||
|
||||
def test_post_to_as2__with_tags(self, activitypubpost_tags):
|
||||
activitypubpost_tags.rendered_content = '<h1>raw_content</h1>\n' \
|
||||
'<p><a class="hashtag" ' \
|
||||
'href="https://example.com/tag/foobar/" rel="noopener ' \
|
||||
'noreferrer nofollow" ' \
|
||||
'target="_blank">#<span>foobar</span></a>\n' \
|
||||
'<a class="hashtag" ' \
|
||||
'href="https://example.com/tag/barfoo/" rel="noopener ' \
|
||||
'noreferrer nofollow" ' \
|
||||
'target="_blank">#<span>barfoo</span></a></p>'
|
||||
activitypubpost_tags.pre_send()
|
||||
result = activitypubpost_tags.to_as2()
|
||||
assert result == {
|
||||
'@context': [
|
||||
'https://www.w3.org/ns/activitystreams',
|
||||
{"pyfed": "https://docs.jasonrobinson.me/ns/python-federation"},
|
||||
{'Hashtag': 'as:Hashtag'},
|
||||
'https://w3id.org/security/v1',
|
||||
{'sensitive': 'as:sensitive'},
|
||||
],
|
||||
'@context': context_manager.build_context(activitypubpost_tags),
|
||||
'type': 'Create',
|
||||
'id': 'http://127.0.0.1:8000/post/123456/#create',
|
||||
'actor': 'http://127.0.0.1:8000/profile/123456/',
|
||||
'object': {
|
||||
'id': 'http://127.0.0.1:8000/post/123456/',
|
||||
'type': 'Note',
|
||||
'url': 'http://127.0.0.1:8000/post/123456/',
|
||||
'attributedTo': 'http://127.0.0.1:8000/profile/123456/',
|
||||
'content': '<h1>raw_content</h1>\n'
|
||||
'<p><a class="mention hashtag" '
|
||||
'<p><a class="hashtag" '
|
||||
'href="https://example.com/tag/foobar/" rel="noopener '
|
||||
'noreferrer nofollow" '
|
||||
'target="_blank">#<span>foobar</span></a>\n'
|
||||
'<a class="mention hashtag" '
|
||||
'<a class="hashtag" '
|
||||
'href="https://example.com/tag/barfoo/" rel="noopener '
|
||||
'noreferrer nofollow" '
|
||||
'target="_blank">#<span>barfoo</span></a></p>',
|
||||
'published': '2019-04-27T00:00:00',
|
||||
'inReplyTo': None,
|
||||
'sensitive': False,
|
||||
'summary': None,
|
||||
'tag': [
|
||||
{
|
||||
"type": "Hashtag",
|
||||
|
@ -259,7 +241,6 @@ class TestEntitiesConvertToAS2:
|
|||
"name": "#foobar",
|
||||
},
|
||||
],
|
||||
'url': '',
|
||||
'source': {
|
||||
'content': '# raw_content\n#foobar\n#barfoo',
|
||||
'mediaType': 'text/markdown',
|
||||
|
@ -269,34 +250,26 @@ class TestEntitiesConvertToAS2:
|
|||
}
|
||||
|
||||
def test_post_to_as2__with_images(self, activitypubpost_images):
|
||||
activitypubpost_images.rendered_content = '<p>raw_content</p>'
|
||||
activitypubpost_images.pre_send()
|
||||
result = activitypubpost_images.to_as2()
|
||||
assert result == {
|
||||
'@context': [
|
||||
'https://www.w3.org/ns/activitystreams',
|
||||
{"pyfed": "https://docs.jasonrobinson.me/ns/python-federation"},
|
||||
{'Hashtag': 'as:Hashtag'},
|
||||
'https://w3id.org/security/v1',
|
||||
{'sensitive': 'as:sensitive'},
|
||||
],
|
||||
'@context': context_manager.build_context(activitypubpost_images),
|
||||
'type': 'Create',
|
||||
'id': 'http://127.0.0.1:8000/post/123456/#create',
|
||||
'actor': 'http://127.0.0.1:8000/profile/123456/',
|
||||
'object': {
|
||||
'id': 'http://127.0.0.1:8000/post/123456/',
|
||||
'type': 'Note',
|
||||
'url': 'http://127.0.0.1:8000/post/123456/',
|
||||
'attributedTo': 'http://127.0.0.1:8000/profile/123456/',
|
||||
'content': '<p>raw_content</p>',
|
||||
'published': '2019-04-27T00:00:00',
|
||||
'inReplyTo': None,
|
||||
'sensitive': False,
|
||||
'summary': None,
|
||||
'tag': [],
|
||||
'url': '',
|
||||
'attachment': [
|
||||
{
|
||||
'type': 'Image',
|
||||
'mediaType': 'image/jpeg',
|
||||
'name': '',
|
||||
'url': 'foobar',
|
||||
'pyfed:inlineImage': False,
|
||||
},
|
||||
|
@ -317,16 +290,11 @@ class TestEntitiesConvertToAS2:
|
|||
}
|
||||
|
||||
def test_post_to_as2__with_diaspora_guid(self, activitypubpost_diaspora_guid):
|
||||
activitypubpost_diaspora_guid.rendered_content = '<p>raw_content</p>'
|
||||
activitypubpost_diaspora_guid.pre_send()
|
||||
result = activitypubpost_diaspora_guid.to_as2()
|
||||
assert result == {
|
||||
'@context': [
|
||||
'https://www.w3.org/ns/activitystreams',
|
||||
{"pyfed": "https://docs.jasonrobinson.me/ns/python-federation"},
|
||||
{'Hashtag': 'as:Hashtag'},
|
||||
'https://w3id.org/security/v1',
|
||||
{'sensitive': 'as:sensitive'},
|
||||
{'diaspora': 'https://diasporafoundation.org/ns/'},
|
||||
],
|
||||
'@context': context_manager.build_context(activitypubpost_diaspora_guid),
|
||||
'type': 'Create',
|
||||
'id': 'http://127.0.0.1:8000/post/123456/#create',
|
||||
'actor': 'http://127.0.0.1:8000/profile/123456/',
|
||||
|
@ -334,14 +302,11 @@ class TestEntitiesConvertToAS2:
|
|||
'id': 'http://127.0.0.1:8000/post/123456/',
|
||||
'diaspora:guid': 'totallyrandomguid',
|
||||
'type': 'Note',
|
||||
'url': 'http://127.0.0.1:8000/post/123456/',
|
||||
'attributedTo': 'http://127.0.0.1:8000/profile/123456/',
|
||||
'content': '<p>raw_content</p>',
|
||||
'published': '2019-04-27T00:00:00',
|
||||
'inReplyTo': None,
|
||||
'sensitive': False,
|
||||
'summary': None,
|
||||
'tag': [],
|
||||
'url': '',
|
||||
'source': {
|
||||
'content': 'raw_content',
|
||||
'mediaType': 'text/markdown',
|
||||
|
@ -350,14 +315,11 @@ class TestEntitiesConvertToAS2:
|
|||
'published': '2019-04-27T00:00:00',
|
||||
}
|
||||
|
||||
@patch("federation.entities.base.fetch_content_type", return_value="image/jpeg")
|
||||
def test_profile_to_as2(self, mock_fetch, activitypubprofile):
|
||||
# noinspection PyUnusedLocal
|
||||
def test_profile_to_as2(self, activitypubprofile):
|
||||
result = activitypubprofile.to_as2()
|
||||
assert result == {
|
||||
"@context": CONTEXTS_DEFAULT + [
|
||||
CONTEXT_LD_SIGNATURES,
|
||||
CONTEXT_MANUALLY_APPROVES_FOLLOWERS,
|
||||
],
|
||||
"@context": context_manager.build_context(activitypubprofile),
|
||||
"endpoints": {
|
||||
"sharedInbox": "https://example.com/public",
|
||||
},
|
||||
|
@ -373,6 +335,7 @@ class TestEntitiesConvertToAS2:
|
|||
"owner": "https://example.com/bob",
|
||||
"publicKeyPem": PUBKEY,
|
||||
},
|
||||
'published': '2022-09-06T00:00:00',
|
||||
"type": "Person",
|
||||
"url": "https://example.com/bob-bobertson",
|
||||
"summary": "foobar",
|
||||
|
@ -380,20 +343,15 @@ class TestEntitiesConvertToAS2:
|
|||
"type": "Image",
|
||||
"url": "urllarge",
|
||||
"mediaType": "image/jpeg",
|
||||
"name": "",
|
||||
"pyfed:inlineImage": False,
|
||||
}
|
||||
}
|
||||
|
||||
@patch("federation.entities.base.fetch_content_type", return_value="image/jpeg")
|
||||
def test_profile_to_as2__with_diaspora_guid(self, mock_fetch, activitypubprofile_diaspora_guid):
|
||||
# noinspection PyUnusedLocal
|
||||
def test_profile_to_as2__with_diaspora_guid(self, activitypubprofile_diaspora_guid):
|
||||
result = activitypubprofile_diaspora_guid.to_as2()
|
||||
assert result == {
|
||||
"@context": CONTEXTS_DEFAULT + [
|
||||
CONTEXT_LD_SIGNATURES,
|
||||
CONTEXT_MANUALLY_APPROVES_FOLLOWERS,
|
||||
CONTEXT_DIASPORA,
|
||||
],
|
||||
"@context": context_manager.build_context(activitypubprofile_diaspora_guid),
|
||||
"endpoints": {
|
||||
"sharedInbox": "https://example.com/public",
|
||||
},
|
||||
|
@ -411,6 +369,7 @@ class TestEntitiesConvertToAS2:
|
|||
"owner": "https://example.com/bob",
|
||||
"publicKeyPem": PUBKEY,
|
||||
},
|
||||
'published': '2022-09-06T00:00:00',
|
||||
"type": "Person",
|
||||
"url": "https://example.com/bob-bobertson",
|
||||
"summary": "foobar",
|
||||
|
@ -418,7 +377,6 @@ class TestEntitiesConvertToAS2:
|
|||
"type": "Image",
|
||||
"url": "urllarge",
|
||||
"mediaType": "image/jpeg",
|
||||
"name": "",
|
||||
"pyfed:inlineImage": False,
|
||||
}
|
||||
}
|
||||
|
@ -426,10 +384,7 @@ class TestEntitiesConvertToAS2:
|
|||
def test_retraction_to_as2(self, activitypubretraction):
|
||||
result = activitypubretraction.to_as2()
|
||||
assert result == {
|
||||
'@context': [
|
||||
'https://www.w3.org/ns/activitystreams',
|
||||
{"pyfed": "https://docs.jasonrobinson.me/ns/python-federation"},
|
||||
],
|
||||
'@context': context_manager.build_context(activitypubretraction),
|
||||
'type': 'Delete',
|
||||
'id': 'http://127.0.0.1:8000/post/123456/#delete',
|
||||
'actor': 'http://127.0.0.1:8000/profile/123456/',
|
||||
|
@ -443,31 +398,30 @@ class TestEntitiesConvertToAS2:
|
|||
def test_retraction_to_as2__announce(self, activitypubretraction_announce):
|
||||
result = activitypubretraction_announce.to_as2()
|
||||
assert result == {
|
||||
'@context': [
|
||||
'https://www.w3.org/ns/activitystreams',
|
||||
{"pyfed": "https://docs.jasonrobinson.me/ns/python-federation"},
|
||||
],
|
||||
'@context': context_manager.build_context(activitypubretraction_announce),
|
||||
'type': 'Undo',
|
||||
'id': 'http://127.0.0.1:8000/post/123456/#delete',
|
||||
'actor': 'http://127.0.0.1:8000/profile/123456/',
|
||||
'object': {
|
||||
'actor': 'http://127.0.0.1:8000/profile/123456/',
|
||||
'id': 'http://127.0.0.1:8000/post/123456/activity',
|
||||
'object': 'http://127.0.0.1:8000/post/123456',
|
||||
'type': 'Announce',
|
||||
'published': '2019-04-27T00:00:00',
|
||||
},
|
||||
'published': '2019-04-27T00:00:00',
|
||||
}
|
||||
|
||||
|
||||
class TestEntitiesPostReceive:
|
||||
@patch("federation.utils.activitypub.retrieve_and_parse_profile", autospec=True)
|
||||
@patch("federation.entities.activitypub.entities.handle_send", autospec=True)
|
||||
@patch("federation.entities.activitypub.models.retrieve_and_parse_profile", autospec=True)
|
||||
@patch("federation.entities.activitypub.models.handle_send", autospec=True)
|
||||
def test_follow_post_receive__sends_correct_accept_back(
|
||||
self, mock_send, mock_retrieve, activitypubfollow, profile
|
||||
):
|
||||
mock_retrieve.return_value = profile
|
||||
activitypubfollow.post_receive()
|
||||
args, kwargs = mock_send.call_args_list[0]
|
||||
assert isinstance(args[0], ActivitypubAccept)
|
||||
assert isinstance(args[0], Accept)
|
||||
assert args[0].activity_id.startswith("https://example.com/profile#accept-")
|
||||
assert args[0].actor_id == "https://example.com/profile"
|
||||
assert args[0].target_id == "https://localhost/follow"
|
||||
|
|
|
@ -1,23 +1,29 @@
|
|||
from datetime import datetime
|
||||
from unittest.mock import patch, Mock
|
||||
from unittest.mock import patch, Mock, DEFAULT
|
||||
|
||||
import json
|
||||
import pytest
|
||||
|
||||
from federation.entities.activitypub.entities import (
|
||||
ActivitypubFollow, ActivitypubAccept, ActivitypubProfile, ActivitypubPost, ActivitypubComment,
|
||||
ActivitypubRetraction, ActivitypubShare)
|
||||
from federation.entities.activitypub.models import Person
|
||||
|
||||
|
||||
#from federation.entities.activitypub.entities import (
|
||||
# models.Follow, models.Accept, models.Person, models.Note, models.Note,
|
||||
# models.Delete, models.Announce)
|
||||
import federation.entities.activitypub.models as models
|
||||
from federation.entities.activitypub.mappers import message_to_objects, get_outbound_entity
|
||||
from federation.entities.base import Accept, Follow, Profile, Post, Comment, Image, Share
|
||||
from federation.entities.base import Accept, Follow, Profile, Post, Comment, Image, Share, Retraction
|
||||
from federation.tests.fixtures.payloads import (
|
||||
ACTIVITYPUB_FOLLOW, ACTIVITYPUB_PROFILE, ACTIVITYPUB_PROFILE_INVALID, ACTIVITYPUB_UNDO_FOLLOW, ACTIVITYPUB_POST,
|
||||
ACTIVITYPUB_COMMENT, ACTIVITYPUB_RETRACTION, ACTIVITYPUB_SHARE, ACTIVITYPUB_RETRACTION_SHARE,
|
||||
ACTIVITYPUB_POST_IMAGES, ACTIVITYPUB_POST_WITH_SOURCE_MARKDOWN, ACTIVITYPUB_POST_WITH_TAGS,
|
||||
ACTIVITYPUB_POST_WITH_SOURCE_BBCODE, ACTIVITYPUB_POST_WITH_MENTIONS, ACTIVITYPUB_PROFILE_WITH_DIASPORA_GUID)
|
||||
ACTIVITYPUB_POST_WITH_SOURCE_BBCODE, ACTIVITYPUB_POST_WITH_MENTIONS, ACTIVITYPUB_PROFILE_WITH_DIASPORA_GUID,
|
||||
ACTIVITYPUB_REMOTE_PROFILE, ACTIVITYPUB_COLLECTION)
|
||||
from federation.types import UserType, ReceiverVariant
|
||||
|
||||
|
||||
class TestActivitypubEntityMappersReceive:
|
||||
@patch.object(ActivitypubFollow, "post_receive", autospec=True)
|
||||
@patch.object(models.Follow, "post_receive", autospec=True)
|
||||
def test_message_to_objects__calls_post_receive_hook(self, mock_post_receive):
|
||||
message_to_objects(ACTIVITYPUB_FOLLOW, "https://example.com/actor")
|
||||
assert mock_post_receive.called
|
||||
|
@ -26,7 +32,7 @@ class TestActivitypubEntityMappersReceive:
|
|||
entities = message_to_objects(ACTIVITYPUB_SHARE, "https://mastodon.social/users/jaywink")
|
||||
assert len(entities) == 1
|
||||
entity = entities[0]
|
||||
assert isinstance(entity, ActivitypubShare)
|
||||
assert isinstance(entity, models.Announce)
|
||||
assert entity.actor_id == "https://mastodon.social/users/jaywink"
|
||||
assert entity.target_id == "https://mastodon.social/users/Gargron/statuses/102559779793316012"
|
||||
assert entity.id == "https://mastodon.social/users/jaywink/statuses/102560701449465612/activity"
|
||||
|
@ -38,7 +44,7 @@ class TestActivitypubEntityMappersReceive:
|
|||
entities = message_to_objects(ACTIVITYPUB_FOLLOW, "https://example.com/actor")
|
||||
assert len(entities) == 1
|
||||
entity = entities[0]
|
||||
assert isinstance(entity, ActivitypubFollow)
|
||||
assert isinstance(entity, models.Follow)
|
||||
assert entity.actor_id == "https://example.com/actor"
|
||||
assert entity.target_id == "https://example.org/actor"
|
||||
assert entity.following is True
|
||||
|
@ -47,7 +53,7 @@ class TestActivitypubEntityMappersReceive:
|
|||
entities = message_to_objects(ACTIVITYPUB_UNDO_FOLLOW, "https://example.com/actor")
|
||||
assert len(entities) == 1
|
||||
entity = entities[0]
|
||||
assert isinstance(entity, ActivitypubFollow)
|
||||
assert isinstance(entity, models.Follow)
|
||||
assert entity.actor_id == "https://example.com/actor"
|
||||
assert entity.target_id == "https://example.org/actor"
|
||||
assert entity.following is False
|
||||
|
@ -65,13 +71,11 @@ class TestActivitypubEntityMappersReceive:
|
|||
entities = message_to_objects(ACTIVITYPUB_POST, "https://diaspodon.fr/users/jaywink")
|
||||
assert len(entities) == 1
|
||||
post = entities[0]
|
||||
assert isinstance(post, ActivitypubPost)
|
||||
assert isinstance(post, models.Note)
|
||||
assert isinstance(post, Post)
|
||||
assert post.raw_content == '<p><span class="h-card"><a class="u-url mention" ' \
|
||||
'href="https://dev.jasonrobinson.me/u/jaywink/">' \
|
||||
'@<span>jaywink</span></a></span> boom</p>'
|
||||
assert post.rendered_content == '<p><span class="h-card"><a href="https://dev.jasonrobinson.me/u/jaywink/" ' \
|
||||
'class="u-url mention">@<span>jaywink</span></a></span> boom</p>'
|
||||
assert post.raw_content == ''
|
||||
assert post.rendered_content == '<p><span class="h-card"><a class="u-url mention" href="https://dev.jasonrobinson.me/u/jaywink/">' \
|
||||
'@<span>jaywink</span></a></span> boom</p>'
|
||||
assert post.id == "https://diaspodon.fr/users/jaywink/statuses/102356911717767237"
|
||||
assert post.actor_id == "https://diaspodon.fr/users/jaywink"
|
||||
assert post.public is True
|
||||
|
@ -82,40 +86,46 @@ class TestActivitypubEntityMappersReceive:
|
|||
entities = message_to_objects(ACTIVITYPUB_POST_WITH_TAGS, "https://diaspodon.fr/users/jaywink")
|
||||
assert len(entities) == 1
|
||||
post = entities[0]
|
||||
assert isinstance(post, ActivitypubPost)
|
||||
assert isinstance(post, models.Note)
|
||||
assert isinstance(post, Post)
|
||||
assert post.raw_content == '<p>boom #test</p>'
|
||||
assert post.raw_content == ''
|
||||
assert post.rendered_content == '<p>boom <a class="mention hashtag" data-hashtag="test" href="https://mastodon.social/tags/test" rel="tag">#<span>test</span></a></p>'
|
||||
|
||||
def test_message_to_objects_simple_post__with_mentions(self):
|
||||
@patch("federation.entities.activitypub.models.get_profile_or_entity",
|
||||
return_value=Person(finger="jaywink@dev3.jasonrobinson.me",url="https://dev3.jasonrobinson.me/u/jaywink/"))
|
||||
def test_message_to_objects_simple_post__with_mentions(self, mock_get):
|
||||
entities = message_to_objects(ACTIVITYPUB_POST_WITH_MENTIONS, "https://mastodon.social/users/jaywink")
|
||||
assert len(entities) == 1
|
||||
post = entities[0]
|
||||
assert isinstance(post, ActivitypubPost)
|
||||
assert isinstance(post, models.Note)
|
||||
assert isinstance(post, Post)
|
||||
assert len(post._mentions) == 1
|
||||
assert list(post._mentions)[0] == "https://dev3.jasonrobinson.me/u/jaywink/"
|
||||
assert list(post._mentions)[0] == "jaywink@dev3.jasonrobinson.me"
|
||||
|
||||
def test_message_to_objects_simple_post__with_source__bbcode(self):
|
||||
|
||||
@patch("federation.entities.activitypub.models.get_profile_or_entity",
|
||||
return_value=Person(finger="jaywink@dev.jasonrobinson.me",url="https://dev.jasonrobinson.me/u/jaywink/"))
|
||||
def test_message_to_objects_simple_post__with_source__bbcode(self, mock_get):
|
||||
entities = message_to_objects(ACTIVITYPUB_POST_WITH_SOURCE_BBCODE, "https://diaspodon.fr/users/jaywink")
|
||||
assert len(entities) == 1
|
||||
post = entities[0]
|
||||
assert isinstance(post, ActivitypubPost)
|
||||
assert isinstance(post, models.Note)
|
||||
assert isinstance(post, Post)
|
||||
assert post.rendered_content == '<p><span class="h-card"><a href="https://dev.jasonrobinson.me/u/jaywink/" ' \
|
||||
'class="u-url mention">@<span>jaywink</span></a></span> boom</p>'
|
||||
assert post.raw_content == '<p><span class="h-card"><a class="u-url mention" ' \
|
||||
'href="https://dev.jasonrobinson.me/u/jaywink/">' \
|
||||
'@<span>jaywink</span></a></span> boom</p>'
|
||||
assert post.rendered_content == '<p><span class="h-card"><a class="u-url mention" data-mention="jaywink@dev.jasonrobinson.me" href="https://dev.jasonrobinson.me/u/jaywink/">' \
|
||||
'@<span>jaywink</span></a></span> boom</p>'
|
||||
assert post.raw_content == ''
|
||||
|
||||
def test_message_to_objects_simple_post__with_source__markdown(self):
|
||||
@patch("federation.entities.activitypub.models.get_profile_or_entity",
|
||||
return_value=Person(finger="jaywink@dev.jasonrobinson.me",url="https://dev.robinson.me/u/jaywink/"))
|
||||
def test_message_to_objects_simple_post__with_source__markdown(self, mock_get):
|
||||
entities = message_to_objects(ACTIVITYPUB_POST_WITH_SOURCE_MARKDOWN, "https://diaspodon.fr/users/jaywink")
|
||||
assert len(entities) == 1
|
||||
post = entities[0]
|
||||
assert isinstance(post, ActivitypubPost)
|
||||
assert isinstance(post, models.Note)
|
||||
assert isinstance(post, Post)
|
||||
assert post.rendered_content == '<p><span class="h-card"><a href="https://dev.jasonrobinson.me/u/jaywink/" ' \
|
||||
'class="u-url mention">@<span>jaywink</span></a></span> boom</p>'
|
||||
assert post.raw_content == "@jaywink boom"
|
||||
assert post.rendered_content == '<p><span class="h-card"><a class="u-url mention" ' \
|
||||
'href="https://dev.jasonrobinson.me/u/jaywink/">@<span>jaywink</span></a></span> boom</p>'
|
||||
assert post.raw_content == "@jaywink@dev.jasonrobinson.me boom"
|
||||
assert post.id == "https://diaspodon.fr/users/jaywink/statuses/102356911717767237"
|
||||
assert post.actor_id == "https://diaspodon.fr/users/jaywink"
|
||||
assert post.public is True
|
||||
|
@ -126,8 +136,9 @@ class TestActivitypubEntityMappersReceive:
|
|||
entities = message_to_objects(ACTIVITYPUB_POST_IMAGES, "https://mastodon.social/users/jaywink")
|
||||
assert len(entities) == 1
|
||||
post = entities[0]
|
||||
assert isinstance(post, ActivitypubPost)
|
||||
assert len(post._children) == 1
|
||||
assert isinstance(post, models.Note)
|
||||
# TODO: test video and audio attachment
|
||||
assert len(post._children) == 2
|
||||
photo = post._children[0]
|
||||
assert isinstance(photo, Image)
|
||||
assert photo.url == "https://files.mastodon.social/media_attachments/files/017/642/079/original/" \
|
||||
|
@ -139,15 +150,18 @@ class TestActivitypubEntityMappersReceive:
|
|||
assert photo.guid == ""
|
||||
assert photo.handle == ""
|
||||
|
||||
def test_message_to_objects_comment(self):
|
||||
@patch("federation.entities.activitypub.models.get_profile_or_entity",
|
||||
return_value=Person(finger="jaywink@dev.jasonrobinson.me", url="https://dev.jasonrobinson.me/u/jaywink/"))
|
||||
def test_message_to_objects_comment(self, mock_get):
|
||||
entities = message_to_objects(ACTIVITYPUB_COMMENT, "https://diaspodon.fr/users/jaywink")
|
||||
assert len(entities) == 1
|
||||
comment = entities[0]
|
||||
assert isinstance(comment, ActivitypubComment)
|
||||
assert isinstance(comment, models.Note)
|
||||
assert isinstance(comment, Comment)
|
||||
assert comment.raw_content == '<p><span class="h-card"><a class="u-url mention" ' \
|
||||
assert comment.rendered_content == '<p><span class="h-card"><a class="u-url mention" data-mention="jaywink@dev.jasonrobinson.me" ' \
|
||||
'href="https://dev.jasonrobinson.me/u/jaywink/">' \
|
||||
'@<span>jaywink</span></a></span> boom</p>'
|
||||
assert comment.raw_content == ''
|
||||
assert comment.id == "https://diaspodon.fr/users/jaywink/statuses/102356911717767237"
|
||||
assert comment.actor_id == "https://diaspodon.fr/users/jaywink"
|
||||
assert comment.target_id == "https://dev.jasonrobinson.me/content/653bad70-41b3-42c9-89cb-c4ee587e68e4/"
|
||||
|
@ -181,7 +195,7 @@ class TestActivitypubEntityMappersReceive:
|
|||
"private": "https://diaspodon.fr/users/jaywink/inbox",
|
||||
"public": "https://diaspodon.fr/inbox",
|
||||
}
|
||||
assert profile.handle == ""
|
||||
assert profile.handle == None
|
||||
assert profile.name == "Jason Robinson"
|
||||
assert profile.image_urls == {
|
||||
"large": "https://diaspodon.fr/system/accounts/avatars/000/033/155/original/pnc__picked_media_be51984c-4"
|
||||
|
@ -215,7 +229,22 @@ class TestActivitypubEntityMappersReceive:
|
|||
assert profile.id == "https://friendica.feneas.org/profile/feneas"
|
||||
assert profile.guid == "76158462365bd347844d248732383358"
|
||||
|
||||
def test_message_to_objects_receivers_are_saved(self):
|
||||
#@patch('federation.tests.django.utils.get_profile', return_value=None)
|
||||
@patch('federation.entities.activitypub.models.get_profile', return_value=None)
|
||||
@patch('federation.utils.activitypub.fetch_document')
|
||||
def test_message_to_objects_receivers_are_saved(self, mock_fetch, mock_func):
|
||||
def side_effect(*args, **kwargs):
|
||||
payloads = {'https://diaspodon.fr/users/jaywink': json.dumps(ACTIVITYPUB_PROFILE),
|
||||
'https://fosstodon.org/users/astdenis': json.dumps(ACTIVITYPUB_REMOTE_PROFILE),
|
||||
'https://diaspodon.fr/users/jaywink/followers': json.dumps(ACTIVITYPUB_COLLECTION),
|
||||
}
|
||||
if args[0] in payloads.keys():
|
||||
return payloads[args[0]], 200, None
|
||||
else:
|
||||
return None, None, 'Nothing here'
|
||||
|
||||
mock_fetch.side_effect = side_effect
|
||||
|
||||
# noinspection PyTypeChecker
|
||||
entities = message_to_objects(
|
||||
ACTIVITYPUB_POST,
|
||||
|
@ -228,7 +257,7 @@ class TestActivitypubEntityMappersReceive:
|
|||
id='https://diaspodon.fr/users/jaywink', receiver_variant=ReceiverVariant.FOLLOWERS,
|
||||
),
|
||||
UserType(
|
||||
id='https://dev.jasonrobinson.me/p/d4574854-a5d7-42be-bfac-f70c16fcaa97/',
|
||||
id='https://fosstodon.org/users/astdenis',
|
||||
receiver_variant=ReceiverVariant.ACTOR,
|
||||
)
|
||||
}
|
||||
|
@ -237,7 +266,7 @@ class TestActivitypubEntityMappersReceive:
|
|||
entities = message_to_objects(ACTIVITYPUB_RETRACTION, "https://friendica.feneas.org/profile/jaywink")
|
||||
assert len(entities) == 1
|
||||
entity = entities[0]
|
||||
assert isinstance(entity, ActivitypubRetraction)
|
||||
assert isinstance(entity, Retraction)
|
||||
assert entity.actor_id == "https://friendica.feneas.org/profile/jaywink"
|
||||
assert entity.target_id == "https://friendica.feneas.org/objects/76158462-165d-3386-aa23-ba2090614385"
|
||||
assert entity.entity_type == "Object"
|
||||
|
@ -246,7 +275,7 @@ class TestActivitypubEntityMappersReceive:
|
|||
entities = message_to_objects(ACTIVITYPUB_RETRACTION_SHARE, "https://mastodon.social/users/jaywink")
|
||||
assert len(entities) == 1
|
||||
entity = entities[0]
|
||||
assert isinstance(entity, ActivitypubRetraction)
|
||||
assert isinstance(entity, Retraction)
|
||||
assert entity.actor_id == "https://mastodon.social/users/jaywink"
|
||||
assert entity.target_id == "https://mastodon.social/users/jaywink/statuses/102571932479036987/activity"
|
||||
assert entity.entity_type == "Object"
|
||||
|
@ -270,6 +299,8 @@ class TestActivitypubEntityMappersReceive:
|
|||
entities = message_to_objects(ACTIVITYPUB_PROFILE, "http://example.com/1234")
|
||||
assert entities[0]._source_protocol == "activitypub"
|
||||
|
||||
@pytest.mark.skip
|
||||
# since calamus turns the whole payload into objects, the source payload is not kept
|
||||
def test_source_object(self):
|
||||
entities = message_to_objects(ACTIVITYPUB_PROFILE, "http://example.com/1234")
|
||||
entity = entities[0]
|
||||
|
@ -293,30 +324,30 @@ class TestActivitypubEntityMappersReceive:
|
|||
|
||||
class TestGetOutboundEntity:
|
||||
def test_already_fine_entities_are_returned_as_is(self, private_key):
|
||||
entity = ActivitypubAccept()
|
||||
entity = models.Accept()
|
||||
entity.validate = Mock()
|
||||
assert get_outbound_entity(entity, private_key) == entity
|
||||
entity = ActivitypubFollow()
|
||||
entity = models.Follow()
|
||||
entity.validate = Mock()
|
||||
assert get_outbound_entity(entity, private_key) == entity
|
||||
entity = ActivitypubProfile()
|
||||
entity = models.Person()
|
||||
entity.validate = Mock()
|
||||
assert get_outbound_entity(entity, private_key) == entity
|
||||
|
||||
@patch.object(ActivitypubAccept, "validate", new=Mock())
|
||||
@patch.object(models.Accept, "validate", new=Mock())
|
||||
def test_accept_is_converted_to_activitypubaccept(self, private_key):
|
||||
entity = Accept()
|
||||
assert isinstance(get_outbound_entity(entity, private_key), ActivitypubAccept)
|
||||
assert isinstance(get_outbound_entity(entity, private_key), models.Accept)
|
||||
|
||||
@patch.object(ActivitypubFollow, "validate", new=Mock())
|
||||
@patch.object(models.Follow, "validate", new=Mock())
|
||||
def test_follow_is_converted_to_activitypubfollow(self, private_key):
|
||||
entity = Follow()
|
||||
assert isinstance(get_outbound_entity(entity, private_key), ActivitypubFollow)
|
||||
assert isinstance(get_outbound_entity(entity, private_key), models.Follow)
|
||||
|
||||
@patch.object(ActivitypubProfile, "validate", new=Mock())
|
||||
@patch.object(models.Person, "validate", new=Mock())
|
||||
def test_profile_is_converted_to_activitypubprofile(self, private_key):
|
||||
entity = Profile()
|
||||
assert isinstance(get_outbound_entity(entity, private_key), ActivitypubProfile)
|
||||
assert isinstance(get_outbound_entity(entity, private_key), models.Person)
|
||||
|
||||
def test_entity_is_validated__fail(self, private_key):
|
||||
entity = Share(
|
||||
|
|
|
@ -13,22 +13,17 @@ from federation.entities.utils import get_base_attributes
|
|||
|
||||
|
||||
class TestGetBaseAttributes:
|
||||
def test_get_base_attributes_returns_only_intended_attributes(self):
|
||||
entity = Post()
|
||||
def test_get_base_attributes_returns_only_intended_attributes(self, diasporapost, diasporaprofile):
|
||||
entity = diasporapost
|
||||
attrs = get_base_attributes(entity).keys()
|
||||
assert set(attrs) == {
|
||||
"created_at", "location", "provider_display_name", "public", "raw_content",
|
||||
"signature", "base_url", "actor_id", "id", "handle", "guid", "activity", "activity_id",
|
||||
"url",
|
||||
}
|
||||
entity = Profile()
|
||||
'activity', 'actor_id', 'created_at', 'guid', 'handle', 'id',
|
||||
'provider_display_name', 'public', 'raw_content'}
|
||||
entity = diasporaprofile
|
||||
attrs = get_base_attributes(entity).keys()
|
||||
assert set(attrs) == {
|
||||
"created_at", "name", "email", "gender", "raw_content", "location", "public",
|
||||
"nsfw", "public_key", "image_urls", "tag_list", "signature", "url", "atom_url",
|
||||
"base_url", "id", "actor_id", "handle", "handle", "guid", "activity", "activity_id", "username",
|
||||
"inboxes",
|
||||
}
|
||||
'created_at', 'guid', 'handle', 'id', 'image_urls', 'inboxes',
|
||||
'name', 'nsfw', 'public', 'raw_content', 'tag_list'}
|
||||
|
||||
|
||||
class TestGetFullXMLRepresentation:
|
||||
|
|
|
@ -120,3 +120,10 @@ class TestShareEntity:
|
|||
def test_instance_creation(self):
|
||||
entity = ShareFactory()
|
||||
entity.validate()
|
||||
|
||||
|
||||
class TestRawContentMixin:
|
||||
@pytest.mark.skip
|
||||
def test_rendered_content(self, post):
|
||||
assert post.rendered_content == """<p>One more test before sleep 😅 This time with an image.</p>
|
||||
<p><img src="https://jasonrobinson.me/media/uploads/2020/12/27/1b2326c6-554c-4448-9da3-bdacddf2bb77.jpeg" alt=""></p>"""
|
||||
|
|
|
@ -87,3 +87,4 @@ class ShareFactory(ActorIDMixinFactory, EntityTypeMixinFactory, IDMixinFactory,
|
|||
|
||||
raw_content = ""
|
||||
provider_display_name = ""
|
||||
to = ["https://www.w3.org/ns/activitystreams#Public"]
|
||||
|
|
|
@ -1,10 +1,12 @@
|
|||
import pytest
|
||||
# noinspection PyPackageRequirements
|
||||
from commonmark import commonmark
|
||||
from freezegun import freeze_time
|
||||
from unittest.mock import patch
|
||||
|
||||
from federation.entities.activitypub.entities import (
|
||||
ActivitypubPost, ActivitypubAccept, ActivitypubFollow, ActivitypubProfile, ActivitypubComment,
|
||||
ActivitypubRetraction, ActivitypubShare, ActivitypubImage)
|
||||
from federation.entities.base import Profile
|
||||
from federation.entities.activitypub.mappers import get_outbound_entity
|
||||
import federation.entities.activitypub.models as models
|
||||
from federation.entities.base import Profile, Post, Comment, Retraction
|
||||
from federation.entities.diaspora.entities import (
|
||||
DiasporaPost, DiasporaComment, DiasporaLike, DiasporaProfile, DiasporaRetraction,
|
||||
DiasporaContact, DiasporaReshare,
|
||||
|
@ -17,8 +19,8 @@ from federation.tests.fixtures.payloads import DIASPORA_PUBLIC_PAYLOAD
|
|||
@pytest.fixture
|
||||
def activitypubannounce():
|
||||
with freeze_time("2019-08-05"):
|
||||
return ActivitypubShare(
|
||||
activity_id="http://127.0.0.1:8000/post/123456/#create",
|
||||
return models.Announce(
|
||||
id="http://127.0.0.1:8000/post/123456/#create",
|
||||
actor_id="http://127.0.0.1:8000/profile/123456/",
|
||||
target_id="http://127.0.0.1:8000/post/012345/",
|
||||
)
|
||||
|
@ -27,8 +29,9 @@ def activitypubannounce():
|
|||
@pytest.fixture
|
||||
def activitypubcomment():
|
||||
with freeze_time("2019-04-27"):
|
||||
return ActivitypubComment(
|
||||
obj = models.Comment(
|
||||
raw_content="raw_content",
|
||||
rendered_content="<p>raw_content</p>",
|
||||
public=True,
|
||||
provider_display_name="Socialhome",
|
||||
id=f"http://127.0.0.1:8000/post/123456/",
|
||||
|
@ -36,11 +39,13 @@ def activitypubcomment():
|
|||
actor_id=f"http://127.0.0.1:8000/profile/123456/",
|
||||
target_id="http://127.0.0.1:8000/post/012345/",
|
||||
)
|
||||
obj.times={'edited':False, 'created':obj.created_at}
|
||||
return obj
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def activitypubfollow():
|
||||
return ActivitypubFollow(
|
||||
return models.Follow(
|
||||
activity_id="https://localhost/follow",
|
||||
actor_id="https://localhost/profile",
|
||||
target_id="https://example.com/profile",
|
||||
|
@ -49,31 +54,36 @@ def activitypubfollow():
|
|||
|
||||
@pytest.fixture
|
||||
def activitypubaccept(activitypubfollow):
|
||||
return ActivitypubAccept(
|
||||
return models.Accept(
|
||||
activity_id="https://localhost/accept",
|
||||
actor_id="https://localhost/profile",
|
||||
target_id="https://example.com/follow/1234",
|
||||
object=activitypubfollow.to_as2(),
|
||||
object_=activitypubfollow,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def activitypubpost():
|
||||
with freeze_time("2019-04-27"):
|
||||
return ActivitypubPost(
|
||||
obj = models.Post(
|
||||
raw_content="# raw_content",
|
||||
public=True,
|
||||
provider_display_name="Socialhome",
|
||||
id=f"http://127.0.0.1:8000/post/123456/",
|
||||
activity_id=f"http://127.0.0.1:8000/post/123456/#create",
|
||||
actor_id=f"http://127.0.0.1:8000/profile/123456/",
|
||||
_media_type="text/markdown",
|
||||
to=["https://www.w3.org/ns/activitystreams#Public"],
|
||||
cc=["https://http://127.0.0.1:8000/profile/123456/followers/"]
|
||||
)
|
||||
obj.times={'edited':False, 'created':obj.created_at}
|
||||
return obj
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def activitypubpost_diaspora_guid():
|
||||
with freeze_time("2019-04-27"):
|
||||
return ActivitypubPost(
|
||||
obj = models.Post(
|
||||
raw_content="raw_content",
|
||||
public=True,
|
||||
provider_display_name="Socialhome",
|
||||
|
@ -82,12 +92,14 @@ def activitypubpost_diaspora_guid():
|
|||
actor_id=f"http://127.0.0.1:8000/profile/123456/",
|
||||
guid="totallyrandomguid",
|
||||
)
|
||||
obj.times={'edited':False, 'created':obj.created_at}
|
||||
return obj
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def activitypubpost_images():
|
||||
with freeze_time("2019-04-27"):
|
||||
return ActivitypubPost(
|
||||
obj = models.Post(
|
||||
raw_content="raw_content",
|
||||
public=True,
|
||||
provider_display_name="Socialhome",
|
||||
|
@ -95,34 +107,38 @@ def activitypubpost_images():
|
|||
activity_id=f"http://127.0.0.1:8000/post/123456/#create",
|
||||
actor_id=f"http://127.0.0.1:8000/profile/123456/",
|
||||
_children=[
|
||||
ActivitypubImage(url="foobar", media_type="image/jpeg"),
|
||||
ActivitypubImage(url="barfoo", name="spam and eggs", media_type="image/jpeg"),
|
||||
models.Image(url="foobar", media_type="image/jpeg"),
|
||||
models.Image(url="barfoo", name="spam and eggs", media_type="image/jpeg"),
|
||||
],
|
||||
)
|
||||
obj.times={'edited':False, 'created':obj.created_at}
|
||||
return obj
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def activitypubpost_mentions():
|
||||
with freeze_time("2019-04-27"):
|
||||
return ActivitypubPost(
|
||||
raw_content="""# raw_content\n\n@{someone@localhost.local} @{http://localhost.local/someone}""",
|
||||
obj = models.Post(
|
||||
raw_content="""# raw_content\n\n@someone@localhost.local @jaywink@localhost.local""",
|
||||
public=True,
|
||||
provider_display_name="Socialhome",
|
||||
id=f"http://127.0.0.1:8000/post/123456/",
|
||||
activity_id=f"http://127.0.0.1:8000/post/123456/#create",
|
||||
actor_id=f"http://127.0.0.1:8000/profile/123456/",
|
||||
_mentions={
|
||||
"http://127.0.0.1:8000/profile/999999",
|
||||
"jaywink@localhost.local",
|
||||
"http://localhost.local/someone",
|
||||
}
|
||||
# _mentions={
|
||||
# "http://127.0.0.1:8000/profile/999999",
|
||||
# "jaywink@localhost.local",
|
||||
# "http://localhost.local/someone",
|
||||
# }
|
||||
)
|
||||
obj.times={'edited':False, 'created':obj.created_at}
|
||||
return obj
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def activitypubpost_tags():
|
||||
with freeze_time("2019-04-27"):
|
||||
return ActivitypubPost(
|
||||
obj = models.Post(
|
||||
raw_content="# raw_content\n#foobar\n#barfoo",
|
||||
public=True,
|
||||
provider_display_name="Socialhome",
|
||||
|
@ -130,13 +146,14 @@ def activitypubpost_tags():
|
|||
activity_id=f"http://127.0.0.1:8000/post/123456/#create",
|
||||
actor_id=f"http://127.0.0.1:8000/profile/123456/",
|
||||
)
|
||||
obj.times={'edited':False, 'created':obj.created_at}
|
||||
return obj
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def activitypubpost_embedded_images():
|
||||
with freeze_time("2019-04-27"):
|
||||
return ActivitypubPost(
|
||||
raw_content="""
|
||||
raw_content="""
|
||||
#Cycling #lauttasaari #sea #sun
|
||||
|
||||
|
||||
|
@ -149,67 +166,79 @@ def activitypubpost_embedded_images():
|
|||
[foo](https://jasonrobinson.me/media/uploads/2019/07/16/daa24d89-cedf-4fc7-bad8-74a9025414710.jpg)
|
||||
#only a link, not embedded
|
||||
https://jasonrobinson.me/media/uploads/2019/07/16/daa24d89-cedf-4fc7-bad8-74a9025414711.jpg
|
||||
""",
|
||||
"""
|
||||
obj = models.Post(
|
||||
raw_content=raw_content,
|
||||
rendered_content=commonmark(raw_content, ignore_html_blocks=True),
|
||||
public=True,
|
||||
provider_display_name="Socialhome",
|
||||
id=f"http://127.0.0.1:8000/post/123456/",
|
||||
activity_id=f"http://127.0.0.1:8000/post/123456/#create",
|
||||
actor_id=f"https://jasonrobinson.me/u/jaywink/",
|
||||
)
|
||||
obj.times={'edited':False, 'created':obj.created_at}
|
||||
return obj
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def activitypubprofile():
|
||||
return ActivitypubProfile(
|
||||
id="https://example.com/bob", raw_content="foobar", name="Bob Bobertson", public=True,
|
||||
tag_list=["socialfederation", "federation"], image_urls={
|
||||
"large": "urllarge", "medium": "urlmedium", "small": "urlsmall"
|
||||
}, inboxes={
|
||||
"private": "https://example.com/bob/private",
|
||||
"public": "https://example.com/public",
|
||||
}, public_key=PUBKEY, url="https://example.com/bob-bobertson"
|
||||
)
|
||||
@patch.object(models.base.Image, 'get_media_type', return_value="image/jpeg")
|
||||
def activitypubprofile(mock_fetch):
|
||||
with freeze_time("2022-09-06"):
|
||||
return models.Person(
|
||||
id="https://example.com/bob", raw_content="foobar", name="Bob Bobertson", public=True,
|
||||
tag_list=["socialfederation", "federation"], image_urls={
|
||||
"large": "urllarge", "medium": "urlmedium", "small": "urlsmall"
|
||||
}, inboxes={
|
||||
"private": "https://example.com/bob/private",
|
||||
"public": "https://example.com/public",
|
||||
}, public_key=PUBKEY, url="https://example.com/bob-bobertson"
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def activitypubprofile_diaspora_guid():
|
||||
return ActivitypubProfile(
|
||||
id="https://example.com/bob", raw_content="foobar", name="Bob Bobertson", public=True,
|
||||
tag_list=["socialfederation", "federation"], image_urls={
|
||||
"large": "urllarge", "medium": "urlmedium", "small": "urlsmall"
|
||||
}, inboxes={
|
||||
"private": "https://example.com/bob/private",
|
||||
"public": "https://example.com/public",
|
||||
}, public_key=PUBKEY, url="https://example.com/bob-bobertson",
|
||||
guid="totallyrandomguid", handle="bob@example.com",
|
||||
)
|
||||
@patch.object(models.base.Image, 'get_media_type', return_value="image/jpeg")
|
||||
def activitypubprofile_diaspora_guid(mock_fetch):
|
||||
with freeze_time("2022-09-06"):
|
||||
return models.Person(
|
||||
id="https://example.com/bob", raw_content="foobar", name="Bob Bobertson", public=True,
|
||||
tag_list=["socialfederation", "federation"], image_urls={
|
||||
"large": "urllarge", "medium": "urlmedium", "small": "urlsmall"
|
||||
}, inboxes={
|
||||
"private": "https://example.com/bob/private",
|
||||
"public": "https://example.com/public",
|
||||
}, public_key=PUBKEY, url="https://example.com/bob-bobertson",
|
||||
guid="totallyrandomguid", handle="bob@example.com",
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def activitypubretraction():
|
||||
with freeze_time("2019-04-27"):
|
||||
return ActivitypubRetraction(
|
||||
obj = Retraction(
|
||||
target_id="http://127.0.0.1:8000/post/123456/",
|
||||
activity_id="http://127.0.0.1:8000/post/123456/#delete",
|
||||
actor_id="http://127.0.0.1:8000/profile/123456/",
|
||||
entity_type="Post",
|
||||
)
|
||||
return get_outbound_entity(obj, None)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def activitypubretraction_announce():
|
||||
with freeze_time("2019-04-27"):
|
||||
return ActivitypubRetraction(
|
||||
target_id="http://127.0.0.1:8000/post/123456/activity",
|
||||
obj = Retraction(
|
||||
id="http://127.0.0.1:8000/post/123456/activity",
|
||||
target_id="http://127.0.0.1:8000/post/123456",
|
||||
activity_id="http://127.0.0.1:8000/post/123456/#delete",
|
||||
actor_id="http://127.0.0.1:8000/profile/123456/",
|
||||
entity_type="Share",
|
||||
)
|
||||
return get_outbound_entity(obj, None)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def activitypubundofollow():
|
||||
return ActivitypubFollow(
|
||||
return models.Follow(
|
||||
activity_id="https://localhost/undo",
|
||||
actor_id="https://localhost/profile",
|
||||
target_id="https://example.com/profile",
|
||||
|
@ -230,7 +259,8 @@ def profile():
|
|||
inboxes={
|
||||
"private": "https://example.com/bob/private",
|
||||
"public": "https://example.com/public",
|
||||
}, public_key=PUBKEY,
|
||||
}, public_key=PUBKEY, to=["https://www.w3.org/ns/activitystreams#Public"],
|
||||
url="https://example.com/alice"
|
||||
)
|
||||
|
||||
|
||||
|
@ -376,6 +406,21 @@ def diasporaretraction():
|
|||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def post():
|
||||
return models.Post(
|
||||
raw_content="""One more test before sleep 😅 This time with an image.
|
||||
|
||||
![](https://jasonrobinson.me/media/uploads/2020/12/27/1b2326c6-554c-4448-9da3-bdacddf2bb77.jpeg)""",
|
||||
public=True,
|
||||
provider_display_name="Socialhome",
|
||||
id="guid",
|
||||
guid="guid",
|
||||
actor_id="alice@example.com",
|
||||
handle="alice@example.com",
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def share():
|
||||
return ShareFactory()
|
||||
|
|
|
@ -69,3 +69,7 @@ XML2 = "<comment><guid>d728fe501584013514526c626dd55703</guid><parent_guid>d641b
|
|||
|
||||
def get_dummy_private_key():
|
||||
return RSA.importKey(PRIVATE_KEY)
|
||||
|
||||
|
||||
def get_dummy_public_key():
|
||||
return PUBKEY
|
||||
|
|
|
@ -31,11 +31,11 @@ ACTIVITYPUB_COMMENT = {
|
|||
'atomUri': 'https://diaspodon.fr/users/jaywink/statuses/102356911717767237',
|
||||
'inReplyToAtomUri': 'https://dev.jasonrobinson.me/content/653bad70-41b3-42c9-89cb-c4ee587e68e4/',
|
||||
'conversation': 'tag:diaspodon.fr,2019-06-28:objectId=2347687:objectType=Conversation',
|
||||
'content': '<p><span class="h-card"><a href="https://dev.jasonrobinson.me/u/jaywink/" class="u-url mention">@<span>jaywink</span></a></span> boom</p>',
|
||||
'contentMap': {'en': '<p><span class="h-card"><a href="https://dev.jasonrobinson.me/u/jaywink/" class="u-url mention">@<span>jaywink</span></a></span> boom</p>'},
|
||||
'content': '<p><span class="h-card"><a class="u-url mention" href="https://dev.jasonrobinson.me/u/jaywink/">@<span>jaywink</span></a></span> boom</p>',
|
||||
'contentMap': {'en': '<p><span class="h-card"><a class="u-url mention" href="https://dev.jasonrobinson.me/u/jaywink/">@<span>jaywink</span></a></span> boom</p>'},
|
||||
'attachment': [],
|
||||
'tag': [{'type': 'Mention',
|
||||
'href': 'https://dev.jasonrobinson.me/p/d4574854-a5d7-42be-bfac-f70c16fcaa97/',
|
||||
'href': 'https://dev.jasonrobinson.me/u/jaywink/',
|
||||
'name': '@jaywink@dev.jasonrobinson.me'}],
|
||||
'replies': {'id': 'https://diaspodon.fr/users/jaywink/statuses/102356911717767237/replies',
|
||||
'type': 'Collection',
|
||||
|
@ -128,6 +128,85 @@ ACTIVITYPUB_PROFILE = {
|
|||
}
|
||||
}
|
||||
|
||||
ACTIVITYPUB_REMOTE_PROFILE = {
|
||||
"@context": ["https://www.w3.org/ns/activitystreams",
|
||||
"https://w3id.org/security/v1",
|
||||
{"Curve25519Key": "toot:Curve25519Key",
|
||||
"Device": "toot:Device",
|
||||
"Ed25519Key": "toot:Ed25519Key",
|
||||
"Ed25519Signature": "toot:Ed25519Signature",
|
||||
"EncryptedMessage": "toot:EncryptedMessage",
|
||||
"PropertyValue": "schema:PropertyValue",
|
||||
"alsoKnownAs": {"@id": "as:alsoKnownAs", "@type": "@id"},
|
||||
"cipherText": "toot:cipherText",
|
||||
"claim": {"@id": "toot:claim", "@type": "@id"},
|
||||
"deviceId": "toot:deviceId",
|
||||
"devices": {"@id": "toot:devices", "@type": "@id"},
|
||||
"discoverable": "toot:discoverable",
|
||||
"featured": {"@id": "toot:featured", "@type": "@id"},
|
||||
"featuredTags": {"@id": "toot:featuredTags", "@type": "@id"},
|
||||
"fingerprintKey": {"@id": "toot:fingerprintKey", "@type": "@id"},
|
||||
"focalPoint": {"@container": "@list", "@id": "toot:focalPoint"},
|
||||
"identityKey": {"@id": "toot:identityKey", "@type": "@id"},
|
||||
"manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
|
||||
"messageFranking": "toot:messageFranking",
|
||||
"messageType": "toot:messageType",
|
||||
"movedTo": {"@id": "as:movedTo", "@type": "@id"},
|
||||
"publicKeyBase64": "toot:publicKeyBase64",
|
||||
"schema": "http://schema.org#",
|
||||
"suspended": "toot:suspended",
|
||||
"toot": "http://joinmastodon.org/ns#",
|
||||
"value": "schema:value"}],
|
||||
"attachment": [{"name": "OS", "type": "PropertyValue", "value": "Manjaro"},
|
||||
{"name": "Self Hosting",
|
||||
"type": "PropertyValue",
|
||||
"value": "Matrix HS, Nextcloud"}],
|
||||
"devices": "https://fosstodon.org/users/astdenis/collections/devices",
|
||||
"discoverable": True,
|
||||
"endpoints": {"sharedInbox": "https://fosstodon.org/inbox"},
|
||||
"featured": "https://fosstodon.org/users/astdenis/collections/featured",
|
||||
"featuredTags": "https://fosstodon.org/users/astdenis/collections/tags",
|
||||
"followers": "https://fosstodon.org/users/astdenis/followers",
|
||||
"following": "https://fosstodon.org/users/astdenis/following",
|
||||
"icon": {"mediaType": "image/jpeg",
|
||||
"type": "Image",
|
||||
"url": "https://cdn.fosstodon.org/accounts/avatars/000/252/976/original/09b7067cde009950.jpg"},
|
||||
"id": "https://fosstodon.org/users/astdenis",
|
||||
"image": {"mediaType": "image/jpeg",
|
||||
"type": "Image",
|
||||
"url": "https://cdn.fosstodon.org/accounts/headers/000/252/976/original/555a1ac1819e4e7f.jpg"},
|
||||
"inbox": "https://fosstodon.org/users/astdenis/inbox",
|
||||
"manuallyApprovesFollowers": False,
|
||||
"name": "Alain",
|
||||
"outbox": "https://fosstodon.org/users/astdenis/outbox",
|
||||
"preferredUsername": "astdenis",
|
||||
"publicKey": {"id": "https://fosstodon.org/users/astdenis#main-key",
|
||||
"owner": "https://fosstodon.org/users/astdenis",
|
||||
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\n"
|
||||
"MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuaoIq/b+aUNqGAJNYF76\n"
|
||||
"WY8tk49Vb1udyb7X+oseBXYtOwCDGfbZMalnFfqur1bAzogkKzuyjCeA3BfVs6R3\n"
|
||||
"Cll897kUveMNHVc24pslhOx5ZzwpNT8e4q97dNaeHWLSLH5H+4JJGbeoD23G5SaY\n"
|
||||
"9ZKt5iP+qRUlO/kSsUPwqsX9i2qSEqzwDiSvyRYhvvx4O588cUaaY9rAliLgtc/P\n"
|
||||
"4EID3v6Edexe2QosUaghwGbb8zZWsYq0O4Umn2QMN4LzmQ0FjP+lq1TFX8FkGDZH\n"
|
||||
"lnP+AMEQMyuac9Yb12t4RwvdsAIk4MXhAKvutMJm/X1GVQIyrsLEmvAO3rgk8dMr\n"
|
||||
"6QIDAQAB\n"
|
||||
"-----END PUBLIC KEY-----\n"},
|
||||
"published": "2020-07-25T00:00:00Z",
|
||||
"summary": "<p>Linux user and sysadmin since 1994, retired from the HPC field "
|
||||
"since 2019.</p><p>Utilisateur et sysadmin Linux depuis 1994, "
|
||||
"retraité du domaine du CHP depuis 2019.</p>",
|
||||
"tag": [],
|
||||
"type": "Person",
|
||||
"url": "https://fosstodon.org/@astdenis"
|
||||
}
|
||||
|
||||
ACTIVITYPUB_COLLECTION = {
|
||||
"@context": "https://www.w3.org/ns/activitystreams",
|
||||
"id": "https://diaspodon.fr/users/jaywink/followers",
|
||||
"totalItems": 231,
|
||||
"type": "OrderedCollection"
|
||||
}
|
||||
|
||||
ACTIVITYPUB_PROFILE_INVALID = {
|
||||
"@context": [
|
||||
"https://www.w3.org/ns/activitystreams",
|
||||
|
@ -235,7 +314,8 @@ ACTIVITYPUB_RETRACTION = {
|
|||
},
|
||||
}
|
||||
|
||||
ACTIVITYPUB_RETRACTION_SHARE = {'@context': 'https://www.w3.org/ns/activitystreams',
|
||||
ACTIVITYPUB_RETRACTION_SHARE = {
|
||||
'@context': ['https://www.w3.org/ns/activitystreams',{"ostatus":"http://ostatus.org#","atomUri":"ostatus:atomUri"}],
|
||||
'id': 'https://mastodon.social/users/jaywink#announces/102571932479036987/undo',
|
||||
'type': 'Undo',
|
||||
'actor': 'https://mastodon.social/users/jaywink',
|
||||
|
@ -255,7 +335,7 @@ ACTIVITYPUB_RETRACTION_SHARE = {'@context': 'https://www.w3.org/ns/activitystrea
|
|||
'signatureValue': 'erI90OrrLqK1DiTqb4OO72XLcE7m74Fs4cH6s0plKKELHa7BZFQmtQYXKEgA9LwIUdSRrIurAUiaDWAw2sQZDg7opYo9x3z+GJDMZ3KxhBND7iHO8ZeGhV1ZBBKUMuBb3BfhOkd3ADp+RQ/fHcw6kOcViV2VsQduinAgQRpiutmGCLd/7eshqSF/aL4tFoAOyCskkm/5JDMNp2nnHNoXXJ+SZf7a8C6YPNDxWd7GzyQNeWkTBBdCJBPvS4HI0wQrTWemBvy6uP8k5QQ7FnqrrRrk/7zrcibFSInuYxiRTRV++rQ3irIbXNtoLhWQd36Iu5U22BclmkS1AAVBDUIj8w=='}}
|
||||
|
||||
ACTIVITYPUB_SHARE = {
|
||||
'@context': 'https://www.w3.org/ns/activitystreams',
|
||||
'@context': ['https://www.w3.org/ns/activitystreams',{"ostatus":"http://ostatus.org#","atomUri":"ostatus:atomUri"}],
|
||||
'id': 'https://mastodon.social/users/jaywink/statuses/102560701449465612/activity',
|
||||
'type': 'Announce',
|
||||
'actor': 'https://mastodon.social/users/jaywink',
|
||||
|
@ -312,7 +392,7 @@ ACTIVITYPUB_POST = {
|
|||
'published': '2019-06-29T21:08:45Z',
|
||||
'to': 'https://www.w3.org/ns/activitystreams#Public',
|
||||
'cc': ['https://diaspodon.fr/users/jaywink/followers',
|
||||
'https://dev.jasonrobinson.me/p/d4574854-a5d7-42be-bfac-f70c16fcaa97/'],
|
||||
'https://fosstodon.org/users/astdenis'],
|
||||
'object': {'id': 'https://diaspodon.fr/users/jaywink/statuses/102356911717767237',
|
||||
'type': 'Note',
|
||||
'summary': None,
|
||||
|
@ -322,13 +402,13 @@ ACTIVITYPUB_POST = {
|
|||
'attributedTo': 'https://diaspodon.fr/users/jaywink',
|
||||
'to': 'https://www.w3.org/ns/activitystreams#Public',
|
||||
'cc': ['https://diaspodon.fr/users/jaywink/followers',
|
||||
'https://dev.jasonrobinson.me/p/d4574854-a5d7-42be-bfac-f70c16fcaa97/'],
|
||||
'https://fosstodon.org/users/astdenis'],
|
||||
'sensitive': False,
|
||||
'atomUri': 'https://diaspodon.fr/users/jaywink/statuses/102356911717767237',
|
||||
'inReplyToAtomUri': None,
|
||||
'conversation': 'tag:diaspodon.fr,2019-06-28:objectId=2347687:objectType=Conversation',
|
||||
'content': '<p><span class="h-card"><a href="https://dev.jasonrobinson.me/u/jaywink/" class="u-url mention">@<span>jaywink</span></a></span> boom</p>',
|
||||
'contentMap': {'en': '<p><span class="h-card"><a href="https://dev.jasonrobinson.me/u/jaywink/" class="u-url mention">@<span>jaywink</span></a></span> boom</p>'},
|
||||
'content': '<p><span class="h-card"><a class="u-url mention" href="https://dev.jasonrobinson.me/u/jaywink/">@<span>jaywink</span></a></span> boom</p>',
|
||||
'contentMap': {'en': '<p><span class="h-card"><a class="u-url mention" href="https://dev.jasonrobinson.me/u/jaywink/">@<span>jaywink</span></a></span> boom</p>'},
|
||||
'attachment': [],
|
||||
'tag': [{'type': 'Mention',
|
||||
'href': 'https://dev.jasonrobinson.me/p/d4574854-a5d7-42be-bfac-f70c16fcaa97/',
|
||||
|
@ -379,9 +459,9 @@ ACTIVITYPUB_POST_WITH_TAGS = {
|
|||
'conversation': 'tag:diaspodon.fr,2019-06-28:objectId=2347687:objectType=Conversation',
|
||||
'content': '<p>boom <a href="https://mastodon.social/tags/test" class="mention hashtag" rel="tag">#<span>test</span></a></p>',
|
||||
'attachment': [],
|
||||
'tag': [{'type': 'Mention',
|
||||
'href': 'https://dev.jasonrobinson.me/p/d4574854-a5d7-42be-bfac-f70c16fcaa97/',
|
||||
'name': '@jaywink@dev.jasonrobinson.me'}],
|
||||
'tag': [{'type': 'Hashtag',
|
||||
'href': 'https://mastodon.social/tags/test',
|
||||
'name': '#test'}],
|
||||
'replies': {'id': 'https://diaspodon.fr/users/jaywink/statuses/102356911717767237/replies',
|
||||
'type': 'Collection',
|
||||
'first': {'type': 'CollectionPage',
|
||||
|
@ -472,13 +552,13 @@ ACTIVITYPUB_POST_WITH_SOURCE_MARKDOWN = {
|
|||
'conversation': 'tag:diaspodon.fr,2019-06-28:objectId=2347687:objectType=Conversation',
|
||||
'content': '<p><span class="h-card"><a href="https://dev.jasonrobinson.me/u/jaywink/" class="u-url mention">@<span>jaywink</span></a></span> boom</p>',
|
||||
'source': {
|
||||
'content': "@jaywink boom",
|
||||
'content': "@{jaywink@dev.jasonrobinson.me} boom",
|
||||
'mediaType': "text/markdown",
|
||||
},
|
||||
'contentMap': {'en': '<p><span class="h-card"><a href="https://dev.jasonrobinson.me/u/jaywink/" class="u-url mention">@<span>jaywink</span></a></span> boom</p>'},
|
||||
'attachment': [],
|
||||
'tag': [{'type': 'Mention',
|
||||
'href': 'https://dev.jasonrobinson.me/p/d4574854-a5d7-42be-bfac-f70c16fcaa97/',
|
||||
'href': 'https://dev.jasonrobinson.me/u/jaywink/',
|
||||
'name': '@jaywink@dev.jasonrobinson.me'}],
|
||||
'replies': {'id': 'https://diaspodon.fr/users/jaywink/statuses/102356911717767237/replies',
|
||||
'type': 'Collection',
|
||||
|
@ -524,15 +604,15 @@ ACTIVITYPUB_POST_WITH_SOURCE_BBCODE = {
|
|||
'atomUri': 'https://diaspodon.fr/users/jaywink/statuses/102356911717767237',
|
||||
'inReplyToAtomUri': None,
|
||||
'conversation': 'tag:diaspodon.fr,2019-06-28:objectId=2347687:objectType=Conversation',
|
||||
'content': '<p><span class="h-card"><a href="https://dev.jasonrobinson.me/u/jaywink/" class="u-url mention">@<span>jaywink</span></a></span> boom</p>',
|
||||
'content': '<p><span class="h-card"><a class="u-url mention" href="https://dev.jasonrobinson.me/u/jaywink/">@<span>jaywink</span></a></span> boom</p>',
|
||||
'source': {
|
||||
'content': "[url=https://example.com]jaywink[/url] boom",
|
||||
'mediaType': "text/bbcode",
|
||||
},
|
||||
'contentMap': {'en': '<p><span class="h-card"><a href="https://dev.jasonrobinson.me/u/jaywink/" class="u-url mention">@<span>jaywink</span></a></span> boom</p>'},
|
||||
'contentMap': {'en': '<p><span class="h-card"><a class="u-url mention" href="https://dev.jasonrobinson.me/u/jaywink/">@<span>jaywink</span></a></span> boom</p>'},
|
||||
'attachment': [],
|
||||
'tag': [{'type': 'Mention',
|
||||
'href': 'https://dev.jasonrobinson.me/p/d4574854-a5d7-42be-bfac-f70c16fcaa97/',
|
||||
'href': 'https://dev.jasonrobinson.me/u/jaywink/',
|
||||
'name': '@jaywink@dev.jasonrobinson.me'}],
|
||||
'replies': {'id': 'https://diaspodon.fr/users/jaywink/statuses/102356911717767237/replies',
|
||||
'type': 'Collection',
|
||||
|
@ -545,7 +625,17 @@ ACTIVITYPUB_POST_WITH_SOURCE_BBCODE = {
|
|||
'signatureValue': 'SjDACS7Z/Cb1SEC3AtxEokID5SHAYl7kpys/hhmaRbpXuFKCxfj2P9BmH8QhLnuam3sENZlrnBOcB5NlcBhIfwo/Xh242RZBmPQf+edTVYVCe1j19dihcftNCHtnqAcKwp/51dNM/OlKu2730FrwvOUXVIPtB7iVqkseO9TRzDYIDj+zBTksnR/NAYtq6SUpmefXfON0uW3N3Uq6PGfExJaS+aeqRf8cPGkZFSIUQZwOLXbIpb7BFjJ1+y1OMOAJueqvikUprAit3v6BiNWurAvSQpC7WWMFUKyA79/xtkO9kIPA/Q4C9ryqdzxZJ0jDhXiaIIQj2JZfIADdjLZHJA=='}
|
||||
}
|
||||
|
||||
ACTIVITYPUB_POST_OBJECT = {
|
||||
ACTIVITYPUB_POST_OBJECT = {'@context': ['https://www.w3.org/ns/activitystreams',
|
||||
{'ostatus': 'http://ostatus.org#',
|
||||
'atomUri': 'ostatus:atomUri',
|
||||
'inReplyToAtomUri': 'ostatus:inReplyToAtomUri',
|
||||
'conversation': 'ostatus:conversation',
|
||||
'sensitive': 'as:sensitive',
|
||||
'Hashtag': 'as:Hashtag',
|
||||
'toot': 'http://joinmastodon.org/ns#',
|
||||
'Emoji': 'toot:Emoji',
|
||||
'focalPoint': {'@container': '@list', '@id': 'toot:focalPoint'},
|
||||
'blurhash': 'toot:blurhash'}],
|
||||
'id': 'https://diaspodon.fr/users/jaywink/statuses/102356911717767237',
|
||||
'type': 'Note',
|
||||
'summary': None,
|
||||
|
|
|
@ -5,13 +5,13 @@ from federation.tests.fixtures.keys import get_dummy_private_key
|
|||
def test_signing_request():
|
||||
key = get_dummy_private_key()
|
||||
auth = get_http_authentication(key, "dummy_key_id")
|
||||
assert auth.algorithm == 'rsa-sha256'
|
||||
assert auth.headers == [
|
||||
assert auth.header_signer.headers == [
|
||||
'(request-target)',
|
||||
'user-agent',
|
||||
'host',
|
||||
'date',
|
||||
'digest',
|
||||
]
|
||||
assert auth.key == key.exportKey()
|
||||
assert auth.key_id == 'dummy_key_id'
|
||||
assert auth.header_signer.secret == key.exportKey()
|
||||
assert 'dummy_key_id' in auth.header_signer.signature_template
|
||||
|
||||
|
|
|
@ -0,0 +1,42 @@
|
|||
from federation.protocols.matrix.appservice import get_registration_config, print_registration_yaml
|
||||
|
||||
|
||||
def test_get_registration():
|
||||
config = get_registration_config()
|
||||
assert config == {
|
||||
"id": "uniqueid",
|
||||
"url": "https://example.com/matrix",
|
||||
"as_token": "secret_token",
|
||||
"hs_token": "secret_token",
|
||||
"sender_localpart": "_myawesomeapp",
|
||||
"namespaces": {
|
||||
"users": [
|
||||
{
|
||||
"exclusive": False,
|
||||
"regex": "@.*",
|
||||
},
|
||||
{
|
||||
"exclusive": True,
|
||||
"regex": "@_myawesomeapp_.*",
|
||||
},
|
||||
],
|
||||
"aliases": [
|
||||
{
|
||||
"exclusive": False,
|
||||
"regex": "#.*",
|
||||
},
|
||||
{
|
||||
"exclusive": True,
|
||||
"regex": "#_myawesomeapp_.*",
|
||||
},
|
||||
],
|
||||
"rooms": [],
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def test_print_registration_yaml():
|
||||
"""
|
||||
Just execute and ensure doesn't crash.
|
||||
"""
|
||||
print_registration_yaml()
|
|
@ -0,0 +1,28 @@
|
|||
import json
|
||||
|
||||
from federation.protocols.matrix.protocol import identify_request, identify_id
|
||||
from federation.types import RequestType
|
||||
|
||||
|
||||
def test_identify_id():
|
||||
assert identify_id('foobar') is False
|
||||
assert identify_id('foobar@example.com') is False
|
||||
assert identify_id('foobar@example.com:8000') is False
|
||||
assert identify_id('http://foobar@example.com') is False
|
||||
assert identify_id('https://foobar@example.com') is False
|
||||
assert identify_id('@foobar:domain.tld') is True
|
||||
assert identify_id('#foobar:domain.tld') is True
|
||||
assert identify_id('!foobar:domain.tld') is True
|
||||
|
||||
|
||||
class TestIdentifyRequest:
|
||||
def test_identifies_matrix_request(self):
|
||||
assert identify_request(RequestType(body=json.dumps('{"events": []}')))
|
||||
assert identify_request(RequestType(body=json.dumps('{"events": []}').encode('utf-8')))
|
||||
|
||||
def test_passes_gracefully_for_non_matrix_request(self):
|
||||
assert not identify_request(RequestType(body='foo'))
|
||||
assert not identify_request(RequestType(body='<xml></<xml>'))
|
||||
assert not identify_request(RequestType(body=b'<xml></<xml>'))
|
||||
assert not identify_request(RequestType(body=json.dumps('{"@context": "foo"}')))
|
||||
assert not identify_request(RequestType(body=json.dumps('{"@context": "foo"}').encode('utf-8')))
|
|
@ -10,7 +10,7 @@ class TestRetrieveRemoteContent:
|
|||
mock_import.return_value = mock_retrieve
|
||||
retrieve_remote_content("https://example.com/foobar")
|
||||
mock_retrieve.retrieve_and_parse_content.assert_called_once_with(
|
||||
id="https://example.com/foobar", guid=None, handle=None, entity_type=None, sender_key_fetcher=None,
|
||||
id="https://example.com/foobar", guid=None, handle=None, entity_type=None, cache=True, sender_key_fetcher=None,
|
||||
)
|
||||
|
||||
@patch("federation.fetchers.importlib.import_module")
|
||||
|
@ -19,7 +19,7 @@ class TestRetrieveRemoteContent:
|
|||
mock_import.return_value = mock_retrieve
|
||||
retrieve_remote_content("1234", handle="user@example.com", entity_type="post", sender_key_fetcher=sum)
|
||||
mock_retrieve.retrieve_and_parse_content.assert_called_once_with(
|
||||
id="1234", guid="1234", handle="user@example.com", entity_type="post", sender_key_fetcher=sum,
|
||||
id="1234", guid="1234", handle="user@example.com", entity_type="post", cache=True, sender_key_fetcher=sum,
|
||||
)
|
||||
|
||||
|
||||
|
|
|
@ -70,7 +70,9 @@ class TestHandleSend:
|
|||
assert kwargs['headers'] == {
|
||||
'Content-Type': 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"',
|
||||
}
|
||||
assert encode_if_text("https://www.w3.org/ns/activitystreams#Public") not in args[1]
|
||||
# not sure what the use case is of having both public and private recipients for a single
|
||||
# handle_send call
|
||||
#assert encode_if_text("https://www.w3.org/ns/activitystreams#Public") not in args[1]
|
||||
|
||||
# Ensure third call is a public activitypub payload
|
||||
args, kwargs = mock_send.call_args_list[2]
|
||||
|
|
|
@ -1,7 +1,10 @@
|
|||
from datetime import timedelta
|
||||
import json
|
||||
from unittest.mock import patch, Mock
|
||||
|
||||
from federation.entities.activitypub.entities import ActivitypubFollow, ActivitypubPost
|
||||
import pytest
|
||||
|
||||
from federation.entities.activitypub.models import Follow, Note
|
||||
from federation.tests.fixtures.payloads import (
|
||||
ACTIVITYPUB_FOLLOW, ACTIVITYPUB_POST, ACTIVITYPUB_POST_OBJECT, ACTIVITYPUB_POST_OBJECT_IMAGES)
|
||||
from federation.utils.activitypub import (
|
||||
|
@ -42,41 +45,60 @@ class TestRetrieveAndParseDocument:
|
|||
@patch("federation.utils.activitypub.fetch_document", autospec=True, return_value=(None, None, None))
|
||||
def test_calls_fetch_document(self, mock_fetch):
|
||||
retrieve_and_parse_document("https://example.com/foobar")
|
||||
# auth argument is passed with kwargs
|
||||
auth = mock_fetch.call_args.kwargs.get('auth', None)
|
||||
mock_fetch.assert_called_once_with(
|
||||
"https://example.com/foobar", extra_headers={'accept': 'application/activity+json'},
|
||||
"https://example.com/foobar", extra_headers={'accept': 'application/activity+json'}, cache=True, auth=auth,
|
||||
)
|
||||
|
||||
@patch("federation.entities.activitypub.models.extract_receivers", return_value=[])
|
||||
@patch("federation.utils.activitypub.fetch_document", autospec=True, return_value=(
|
||||
json.dumps(ACTIVITYPUB_FOLLOW), None, None),
|
||||
)
|
||||
@patch.object(ActivitypubFollow, "post_receive")
|
||||
def test_returns_entity_for_valid_document__follow(self, mock_post_receive, mock_fetch):
|
||||
entity = retrieve_and_parse_document("https://example.com/foobar")
|
||||
assert isinstance(entity, ActivitypubFollow)
|
||||
@patch.object(Follow, "post_receive")
|
||||
def test_returns_entity_for_valid_document__follow(self, mock_post_receive, mock_fetch, mock_recv):
|
||||
entity = retrieve_and_parse_document("https://example.com/follow")
|
||||
print(entity)
|
||||
assert isinstance(entity, Follow)
|
||||
|
||||
@patch("federation.entities.activitypub.models.get_profile_or_entity", return_value=None)
|
||||
@patch("federation.utils.activitypub.fetch_document", autospec=True, return_value=(
|
||||
json.dumps(ACTIVITYPUB_POST_OBJECT), None, None),
|
||||
)
|
||||
def test_returns_entity_for_valid_document__post__without_activity(self, mock_fetch):
|
||||
entity = retrieve_and_parse_document("https://example.com/foobar")
|
||||
assert isinstance(entity, ActivitypubPost)
|
||||
def test_returns_entity_for_valid_document__post__without_activity(self, mock_fetch, mock_recv):
|
||||
entity = retrieve_and_parse_document("https://diaspodon.fr/users/jaywink/statuses/102356911717767237")
|
||||
assert isinstance(entity, Note)
|
||||
|
||||
@patch("federation.entities.activitypub.models.extract_receivers", return_value=[])
|
||||
@patch("federation.utils.activitypub.fetch_document", autospec=True, return_value=(
|
||||
json.dumps(ACTIVITYPUB_POST_OBJECT_IMAGES), None, None),
|
||||
)
|
||||
def test_returns_entity_for_valid_document__post__without_activity__with_images(self, mock_fetch):
|
||||
entity = retrieve_and_parse_document("https://example.com/foobar")
|
||||
assert isinstance(entity, ActivitypubPost)
|
||||
def test_returns_entity_for_valid_document__post__without_activity__with_images(self, mock_fetch, mock_recv):
|
||||
entity = retrieve_and_parse_document("https://mastodon.social/users/foobar/statuses/34324r")
|
||||
assert isinstance(entity, Note)
|
||||
assert len(entity._children) == 1
|
||||
assert entity._children[0].url == "https://files.mastodon.social/media_attachments/files/017/792/237/original" \
|
||||
"/foobar.jpg"
|
||||
|
||||
@patch("federation.entities.activitypub.models.verify_ld_signature", return_value=None)
|
||||
@patch("federation.entities.activitypub.models.get_profile_or_entity", return_value=None)
|
||||
@patch("federation.utils.activitypub.fetch_document", autospec=True, return_value=(
|
||||
json.dumps(ACTIVITYPUB_POST), None, None),
|
||||
)
|
||||
def test_returns_entity_for_valid_document__post__wrapped_in_activity(self, mock_fetch):
|
||||
def test_returns_entity_for_valid_document__post__wrapped_in_activity(
|
||||
self, mock_fetch, mock_recv, mock_sign):
|
||||
entity = retrieve_and_parse_document("https://diaspodon.fr/users/jaywink/statuses/102356911717767237/activity")
|
||||
assert isinstance(entity, Note)
|
||||
|
||||
@patch("federation.entities.activitypub.models.verify_ld_signature", return_value=None)
|
||||
@patch("federation.entities.activitypub.models.get_profile_or_entity", return_value=None)
|
||||
@patch("federation.utils.activitypub.fetch_document", autospec=True, return_value=(
|
||||
json.dumps(ACTIVITYPUB_POST), None, None),
|
||||
)
|
||||
def test_returns_none_for_forged_document(
|
||||
self, mock_fetch, mock_recv, mock_sign):
|
||||
entity = retrieve_and_parse_document("https://example.com/foobar")
|
||||
assert isinstance(entity, ActivitypubPost)
|
||||
assert entity is None
|
||||
|
||||
@patch("federation.utils.activitypub.fetch_document", autospec=True, return_value=('{"foo": "bar"}', None, None))
|
||||
def test_returns_none_for_invalid_document(self, mock_fetch):
|
||||
|
|
|
@ -127,7 +127,7 @@ class TestRetrieveAndParseContent:
|
|||
@patch("federation.utils.diaspora.get_fetch_content_endpoint", return_value="https://example.com/fetch/spam/eggs")
|
||||
def test_calls_fetch_document(self, mock_get, mock_fetch):
|
||||
retrieve_and_parse_content(id="eggs", guid="eggs", handle="user@example.com", entity_type="spam")
|
||||
mock_fetch.assert_called_once_with("https://example.com/fetch/spam/eggs")
|
||||
mock_fetch.assert_called_once_with("https://example.com/fetch/spam/eggs", cache=True)
|
||||
|
||||
@patch("federation.utils.diaspora.fetch_document", return_value=(None, 404, None))
|
||||
@patch("federation.utils.diaspora.get_fetch_content_endpoint")
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
from datetime import timedelta
|
||||
from unittest.mock import patch, Mock, call
|
||||
|
||||
import pytest
|
||||
|
@ -12,24 +13,25 @@ from federation.utils.network import (
|
|||
class TestFetchDocument:
|
||||
call_args = {"timeout": 10, "headers": {'user-agent': USER_AGENT}}
|
||||
|
||||
@patch("federation.utils.network.requests.get", autospec=True, return_value=Mock(status_code=200, text="foo"))
|
||||
@patch("federation.utils.network.session.get", return_value=Mock(status_code=200, text="foo"))
|
||||
def test_extra_headers(self, mock_get):
|
||||
fetch_document("https://example.com/foo", extra_headers={'accept': 'application/activity+json'})
|
||||
mock_get.assert_called_once_with('https://example.com/foo', headers={
|
||||
'user-agent': USER_AGENT, 'accept': 'application/activity+json',
|
||||
})
|
||||
mock_get.assert_called_once_with('https://example.com/foo', timeout=10, headers={
|
||||
'user-agent': USER_AGENT, 'accept': 'application/activity+json'},
|
||||
expire_after=timedelta(hours=6)
|
||||
)
|
||||
|
||||
def test_raises_without_url_and_host(self):
|
||||
with pytest.raises(ValueError):
|
||||
fetch_document()
|
||||
|
||||
@patch("federation.utils.network.requests.get")
|
||||
@patch("federation.utils.network.session.get")
|
||||
def test_url_is_called(self, mock_get):
|
||||
mock_get.return_value = Mock(status_code=200, text="foo")
|
||||
fetch_document("https://localhost")
|
||||
assert mock_get.called
|
||||
|
||||
@patch("federation.utils.network.requests.get")
|
||||
@patch("federation.utils.network.session.get")
|
||||
def test_host_is_called_with_https_first_then_http(self, mock_get):
|
||||
def mock_failing_https_get(url, *args, **kwargs):
|
||||
if url.find("https://") > -1:
|
||||
|
@ -43,7 +45,7 @@ class TestFetchDocument:
|
|||
call("http://localhost/", **self.call_args),
|
||||
]
|
||||
|
||||
@patch("federation.utils.network.requests.get")
|
||||
@patch("federation.utils.network.session.get")
|
||||
def test_host_is_sanitized(self, mock_get):
|
||||
mock_get.return_value = Mock(status_code=200, text="foo")
|
||||
fetch_document(host="http://localhost")
|
||||
|
@ -51,7 +53,7 @@ class TestFetchDocument:
|
|||
call("https://localhost/", **self.call_args)
|
||||
]
|
||||
|
||||
@patch("federation.utils.network.requests.get")
|
||||
@patch("federation.utils.network.session.get")
|
||||
def test_path_is_sanitized(self, mock_get):
|
||||
mock_get.return_value = Mock(status_code=200, text="foo")
|
||||
fetch_document(host="localhost", path="foobar/bazfoo")
|
||||
|
@ -59,7 +61,7 @@ class TestFetchDocument:
|
|||
call("https://localhost/foobar/bazfoo", **self.call_args)
|
||||
]
|
||||
|
||||
@patch("federation.utils.network.requests.get")
|
||||
@patch("federation.utils.network.session.get")
|
||||
def test_exception_is_raised_if_both_protocols_fail(self, mock_get):
|
||||
mock_get.side_effect = HTTPError
|
||||
doc, code, exc = fetch_document(host="localhost")
|
||||
|
@ -68,7 +70,7 @@ class TestFetchDocument:
|
|||
assert code == None
|
||||
assert exc.__class__ == HTTPError
|
||||
|
||||
@patch("federation.utils.network.requests.get")
|
||||
@patch("federation.utils.network.session.get")
|
||||
def test_exception_is_raised_if_url_fails(self, mock_get):
|
||||
mock_get.side_effect = HTTPError
|
||||
doc, code, exc = fetch_document("localhost")
|
||||
|
@ -77,7 +79,7 @@ class TestFetchDocument:
|
|||
assert code == None
|
||||
assert exc.__class__ == HTTPError
|
||||
|
||||
@patch("federation.utils.network.requests.get")
|
||||
@patch("federation.utils.network.session.get")
|
||||
def test_exception_is_raised_if_http_fails_and_raise_ssl_errors_true(self, mock_get):
|
||||
mock_get.side_effect = SSLError
|
||||
doc, code, exc = fetch_document("localhost")
|
||||
|
@ -86,7 +88,7 @@ class TestFetchDocument:
|
|||
assert code == None
|
||||
assert exc.__class__ == SSLError
|
||||
|
||||
@patch("federation.utils.network.requests.get")
|
||||
@patch("federation.utils.network.session.get")
|
||||
def test_exception_is_raised_on_network_error(self, mock_get):
|
||||
mock_get.side_effect = RequestException
|
||||
doc, code, exc = fetch_document(host="localhost")
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
from federation.utils.text import decode_if_bytes, encode_if_text, validate_handle, process_text_links, find_tags
|
||||
import pytest
|
||||
|
||||
from federation.utils.text import decode_if_bytes, encode_if_text, validate_handle, find_tags
|
||||
|
||||
|
||||
def test_decode_if_bytes():
|
||||
|
@ -18,107 +20,49 @@ class TestFindTags:
|
|||
|
||||
def test_all_tags_are_parsed_from_text(self):
|
||||
source = "#starting and #MixED with some #line\nendings also tags can\n#start on new line"
|
||||
tags, text = find_tags(source)
|
||||
tags = find_tags(source)
|
||||
assert tags == {"starting", "mixed", "line", "start"}
|
||||
assert text == source
|
||||
tags, text = find_tags(source, replacer=self._replacer)
|
||||
assert text == "#starting/starting and #MixED/mixed with some #line/line\nendings also tags can\n" \
|
||||
"#start/start on new line"
|
||||
|
||||
def test_code_block_tags_ignored(self):
|
||||
source = "foo\n```\n#code\n```\n#notcode\n\n #alsocode\n"
|
||||
tags, text = find_tags(source)
|
||||
tags = find_tags(source)
|
||||
assert tags == {"notcode"}
|
||||
assert text == source
|
||||
tags, text = find_tags(source, replacer=self._replacer)
|
||||
assert text == "foo\n```\n#code\n```\n#notcode/notcode\n\n #alsocode\n"
|
||||
|
||||
def test_endings_are_filtered_out(self):
|
||||
source = "#parenthesis) #exp! #list] *#doh* _#bah_ #gah% #foo/#bar"
|
||||
tags, text = find_tags(source)
|
||||
tags = find_tags(source)
|
||||
assert tags == {"parenthesis", "exp", "list", "doh", "bah", "gah", "foo", "bar"}
|
||||
assert text == source
|
||||
tags, text = find_tags(source, replacer=self._replacer)
|
||||
assert text == "#parenthesis/parenthesis) #exp/exp! #list/list] *#doh/doh* _#bah/bah_ #gah/gah% " \
|
||||
"#foo/foo/#bar/bar"
|
||||
|
||||
def test_finds_tags(self):
|
||||
source = "#post **Foobar** #tag #OtherTag #third\n#fourth"
|
||||
tags, text = find_tags(source)
|
||||
tags = find_tags(source)
|
||||
assert tags == {"third", "fourth", "post", "othertag", "tag"}
|
||||
assert text == source
|
||||
tags, text = find_tags(source, replacer=self._replacer)
|
||||
assert text == "#post/post **Foobar** #tag/tag #OtherTag/othertag #third/third\n#fourth/fourth"
|
||||
|
||||
def test_ok_with_html_tags_in_text(self):
|
||||
source = "<p>#starting and <span>#MixED</span> however not <#>this</#> or <#/>that"
|
||||
tags, text = find_tags(source)
|
||||
tags = find_tags(source)
|
||||
assert tags == {"starting", "mixed"}
|
||||
assert text == source
|
||||
tags, text = find_tags(source, replacer=self._replacer)
|
||||
assert text == "<p>#starting/starting and <span>#MixED/mixed</span> however not <#>this</#> or <#/>that"
|
||||
|
||||
def test_postfixed_tags(self):
|
||||
source = "#foo) #bar] #hoo, #hee."
|
||||
tags, text = find_tags(source)
|
||||
tags = find_tags(source)
|
||||
assert tags == {"foo", "bar", "hoo", "hee"}
|
||||
assert text == source
|
||||
tags, text = find_tags(source, replacer=self._replacer)
|
||||
assert text == "#foo/foo) #bar/bar] #hoo/hoo, #hee/hee."
|
||||
|
||||
def test_prefixed_tags(self):
|
||||
source = "(#foo [#bar"
|
||||
tags, text = find_tags(source)
|
||||
tags = find_tags(source)
|
||||
assert tags == {"foo", "bar"}
|
||||
assert text == source
|
||||
tags, text = find_tags(source, replacer=self._replacer)
|
||||
assert text == "(#foo/foo [#bar/bar"
|
||||
|
||||
def test_invalid_text_returns_no_tags(self):
|
||||
source = "#a!a #a#a #a$a #a%a #a^a #a&a #a*a #a+a #a.a #a,a #a@a #a£a #a(a #a)a #a=a " \
|
||||
"#a?a #a`a #a'a #a\\a #a{a #a[a #a]a #a}a #a~a #a;a #a:a #a\"a #a’a #a”a #\xa0cd"
|
||||
tags, text = find_tags(source)
|
||||
assert tags == set()
|
||||
assert text == source
|
||||
tags, text = find_tags(source, replacer=self._replacer)
|
||||
assert text == source
|
||||
tags = find_tags(source)
|
||||
assert tags == {'a'}
|
||||
|
||||
def test_start_of_paragraph_in_html_content(self):
|
||||
source = '<p>First line</p><p>#foobar #barfoo</p>'
|
||||
tags, text = find_tags(source)
|
||||
tags = find_tags(source)
|
||||
assert tags == {"foobar", "barfoo"}
|
||||
assert text == source
|
||||
tags, text = find_tags(source, replacer=self._replacer)
|
||||
assert text == '<p>First line</p><p>#foobar/foobar #barfoo/barfoo</p>'
|
||||
|
||||
|
||||
class TestProcessTextLinks:
|
||||
def test_link_at_start_or_end(self):
|
||||
assert process_text_links('https://example.org example.org\nhttp://example.org') == \
|
||||
'<a href="https://example.org" rel="nofollow" target="_blank">https://example.org</a> ' \
|
||||
'<a href="http://example.org" rel="nofollow" target="_blank">example.org</a>\n' \
|
||||
'<a href="http://example.org" rel="nofollow" target="_blank">http://example.org</a>'
|
||||
|
||||
def test_existing_links_get_attrs_added(self):
|
||||
assert process_text_links('<a href="https://example.org">https://example.org</a>') == \
|
||||
'<a href="https://example.org" rel="nofollow" target="_blank">https://example.org</a>'
|
||||
|
||||
def test_code_sections_are_skipped(self):
|
||||
assert process_text_links('<code>https://example.org</code><code>\nhttps://example.org\n</code>') == \
|
||||
'<code>https://example.org</code><code>\nhttps://example.org\n</code>'
|
||||
|
||||
def test_emails_are_skipped(self):
|
||||
assert process_text_links('foo@example.org') == 'foo@example.org'
|
||||
|
||||
def test_does_not_add_target_blank_if_link_is_internal(self):
|
||||
assert process_text_links('<a href="/streams/tag/foobar">#foobar</a>') == \
|
||||
'<a href="/streams/tag/foobar">#foobar</a>'
|
||||
|
||||
def test_does_not_remove_mention_classes(self):
|
||||
assert process_text_links('<p><span class="h-card"><a href="https://dev.jasonrobinson.me/u/jaywink/" '
|
||||
'class="u-url mention">@<span>jaywink</span></a></span> boom</p>') == \
|
||||
'<p><span class="h-card"><a class="u-url mention" href="https://dev.jasonrobinson.me/u/jaywink/" ' \
|
||||
'rel="nofollow" target="_blank">@<span>jaywink</span></a></span> boom</p>'
|
||||
|
||||
|
||||
def test_validate_handle():
|
||||
|
|
|
@ -2,7 +2,9 @@ from enum import Enum
|
|||
from typing import Optional, Dict, Union
|
||||
|
||||
import attr
|
||||
# noinspection PyPackageRequirements
|
||||
from Crypto.PublicKey import RSA
|
||||
# noinspection PyPackageRequirements
|
||||
from Crypto.PublicKey.RSA import RsaKey
|
||||
|
||||
|
||||
|
@ -26,6 +28,15 @@ class ReceiverVariant(Enum):
|
|||
FOLLOWERS = "followers"
|
||||
|
||||
|
||||
# TODO needed?
|
||||
class UserVariant(Enum):
|
||||
"""
|
||||
Indicates whether the user is local or remote.
|
||||
"""
|
||||
LOCAL = "local"
|
||||
REMOTE = "remote"
|
||||
|
||||
|
||||
@attr.s(frozen=True)
|
||||
class UserType:
|
||||
id: str = attr.ib()
|
||||
|
@ -36,6 +47,11 @@ class UserType:
|
|||
handle: Optional[str] = attr.ib(default=None)
|
||||
guid: Optional[str] = attr.ib(default=None)
|
||||
|
||||
# Required only if sending to Matrix protocol
|
||||
mxid: Optional[str] = attr.ib(default=None)
|
||||
# TODO needed?
|
||||
variant: Optional[UserVariant] = attr.ib(default=None)
|
||||
|
||||
@property
|
||||
def rsa_private_key(self) -> RsaKey:
|
||||
if isinstance(self.private_key, str):
|
||||
|
|
|
@ -1,14 +1,23 @@
|
|||
import json
|
||||
import logging
|
||||
import re
|
||||
from typing import Optional, Any
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from federation.entities.activitypub.entities import ActivitypubProfile
|
||||
from federation.entities.activitypub.mappers import message_to_objects
|
||||
from federation.protocols.activitypub.signing import get_http_authentication
|
||||
from federation.utils.network import fetch_document, try_retrieve_webfinger_document
|
||||
from federation.utils.text import decode_if_bytes, validate_handle
|
||||
|
||||
logger = logging.getLogger('federation')
|
||||
|
||||
try:
|
||||
from federation.utils.django import get_federation_user
|
||||
federation_user = get_federation_user()
|
||||
except Exception as exc:
|
||||
federation_user = None
|
||||
logger.warning("django is required for get requests signing: %s", exc)
|
||||
|
||||
type_path = re.compile(r'^application/(activity|ld)\+json')
|
||||
|
||||
def get_profile_id_from_webfinger(handle: str) -> Optional[str]:
|
||||
"""
|
||||
|
@ -23,30 +32,40 @@ def get_profile_id_from_webfinger(handle: str) -> Optional[str]:
|
|||
except json.JSONDecodeError:
|
||||
return
|
||||
for link in doc.get("links", []):
|
||||
if link.get("rel") == "self" and link.get("type") == "application/activity+json":
|
||||
if link.get("rel") == "self" and type_path.match(link.get("type")):
|
||||
return link["href"]
|
||||
logger.debug("get_profile_id_from_webfinger: found webfinger but it has no as2 self href")
|
||||
|
||||
|
||||
def retrieve_and_parse_content(**kwargs) -> Optional[Any]:
|
||||
return retrieve_and_parse_document(kwargs.get("id"))
|
||||
return retrieve_and_parse_document(kwargs.get("id"), cache=kwargs.get('cache',True))
|
||||
|
||||
|
||||
def retrieve_and_parse_document(fid: str) -> Optional[Any]:
|
||||
def retrieve_and_parse_document(fid: str, cache: bool=True) -> Optional[Any]:
|
||||
"""
|
||||
Retrieve remote document by ID and return the entity.
|
||||
"""
|
||||
document, status_code, ex = fetch_document(fid, extra_headers={'accept': 'application/activity+json'})
|
||||
from federation.entities.activitypub.models import element_to_objects # Circulars
|
||||
document, status_code, ex = fetch_document(fid, extra_headers={'accept': 'application/activity+json'}, cache=cache,
|
||||
auth=get_http_authentication(federation_user.rsa_private_key,f'{federation_user.id}#main-key', digest=False) if federation_user else None)
|
||||
if document:
|
||||
document = json.loads(decode_if_bytes(document))
|
||||
entities = message_to_objects(document, fid)
|
||||
logger.info("retrieve_and_parse_document - found %s entities", len(entities))
|
||||
try:
|
||||
document = json.loads(decode_if_bytes(document))
|
||||
except json.decoder.JSONDecodeError:
|
||||
return None
|
||||
entities = element_to_objects(document)
|
||||
if entities:
|
||||
logger.info("retrieve_and_parse_document - using first entity: %s", entities[0])
|
||||
return entities[0]
|
||||
entity = entities[0]
|
||||
id = entity.id or entity.activity_id
|
||||
# check against potential payload forgery (CVE-2024-23832)
|
||||
if urlparse(id).netloc != urlparse(fid).netloc:
|
||||
logger.warning('retrieve_and_parse_document - payload may be forged, discarding: %s', fid)
|
||||
return None
|
||||
logger.info("retrieve_and_parse_document - using first entity: %s", entity)
|
||||
return entity
|
||||
|
||||
|
||||
def retrieve_and_parse_profile(fid: str) -> Optional[ActivitypubProfile]:
|
||||
def retrieve_and_parse_profile(fid: str) -> Optional[Any]:
|
||||
"""
|
||||
Retrieve the remote fid and return a Profile object.
|
||||
"""
|
||||
|
@ -66,3 +85,4 @@ def retrieve_and_parse_profile(fid: str) -> Optional[ActivitypubProfile]:
|
|||
profile, ex)
|
||||
return
|
||||
return profile
|
||||
|
||||
|
|
|
@ -152,6 +152,7 @@ def parse_profile_from_hcard(hcard: str, handle: str):
|
|||
public=True,
|
||||
id=handle,
|
||||
handle=handle,
|
||||
finger=handle,
|
||||
guid=_get_element_text_or_none(doc, ".uid"),
|
||||
public_key=_get_element_text_or_none(doc, ".key"),
|
||||
username=handle.split('@')[0],
|
||||
|
@ -161,8 +162,8 @@ def parse_profile_from_hcard(hcard: str, handle: str):
|
|||
|
||||
|
||||
def retrieve_and_parse_content(
|
||||
id: str, guid: str, handle: str, entity_type: str, sender_key_fetcher: Callable[[str], str]=None,
|
||||
):
|
||||
id: str, guid: str, handle: str, entity_type: str, cache: bool=True,
|
||||
sender_key_fetcher: Callable[[str], str]=None):
|
||||
"""Retrieve remote content and return an Entity class instance.
|
||||
|
||||
This is basically the inverse of receiving an entity. Instead, we fetch it, then call "handle_receive".
|
||||
|
@ -175,7 +176,7 @@ def retrieve_and_parse_content(
|
|||
return
|
||||
_username, domain = handle.split("@")
|
||||
url = get_fetch_content_endpoint(domain, entity_type.lower(), guid)
|
||||
document, status_code, error = fetch_document(url)
|
||||
document, status_code, error = fetch_document(url, cache=cache)
|
||||
if status_code == 200:
|
||||
request = RequestType(body=document)
|
||||
_sender, _protocol, entities = handle_receive(request, sender_key_fetcher=sender_key_fetcher)
|
||||
|
|
|
@ -1,7 +1,10 @@
|
|||
import importlib
|
||||
import redis
|
||||
from requests_cache import RedisCache, SQLiteCache
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from federation.types import UserType
|
||||
|
||||
|
||||
def get_configuration():
|
||||
|
@ -20,13 +23,14 @@ def get_configuration():
|
|||
}
|
||||
try:
|
||||
configuration.update(settings.FEDERATION)
|
||||
except ImproperlyConfigured:
|
||||
except (ModuleNotFoundError, ImproperlyConfigured):
|
||||
# Django is not properly configured, return defaults
|
||||
return configuration
|
||||
if not all([
|
||||
"get_private_key_function" in configuration,
|
||||
"get_profile_function" in configuration,
|
||||
"base_url" in configuration,
|
||||
"federation_id" in configuration,
|
||||
]):
|
||||
raise ImproperlyConfigured("Missing required FEDERATION settings, please check documentation.")
|
||||
return configuration
|
||||
|
@ -42,3 +46,35 @@ def get_function_from_config(item):
|
|||
module = importlib.import_module(module_path)
|
||||
func = getattr(module, func_name)
|
||||
return func
|
||||
|
||||
def get_federation_user():
    """
    Build a UserType for the configured federation actor.

    Returns None when no ``federation_id`` is configured, when the
    private-key function cannot be resolved, or when no key is returned
    for the actor.
    """
    configuration = get_configuration()
    federation_id = configuration.get('federation_id')
    if not federation_id:
        return None

    try:
        private_key_getter = get_function_from_config("get_private_key_function")
    except AttributeError:
        return None

    private_key = private_key_getter(federation_id)
    if not private_key:
        return None

    return UserType(id=federation_id, private_key=private_key)
|
||||
|
||||
def get_redis():
    """
    Return a connected redis object if configured, otherwise None.
    """
    redis_options = get_configuration().get('redis')
    return redis.Redis(**redis_options) if redis_options else None
|
||||
|
||||
def get_requests_cache_backend(namespace):
    """
    Return the requests-cache backend to use.

    Uses RedisCache if redis is configured, else falls back to SQLiteCache.
    """
    redis_options = get_configuration().get('redis')
    if redis_options:
        return RedisCache(namespace, **redis_options)
    return SQLiteCache()
|
||||
|
|
|
@ -0,0 +1,91 @@
|
|||
import hashlib
|
||||
import hmac
|
||||
import uuid
|
||||
from typing import Dict, Optional
|
||||
|
||||
import requests
|
||||
|
||||
from federation.utils.django import get_function_from_config
|
||||
|
||||
|
||||
def appservice_auth_header() -> Dict:
    """Return the Authorization header carrying the Matrix appservice token."""
    token = get_matrix_configuration()['appservice']['token']
    return {"Authorization": f"Bearer {token}"}
|
||||
|
||||
|
||||
def generate_dendrite_mac(shared_secret: str, username: str, password: str, admin: bool) -> str:
    """
    Generate a MAC for using in registering users with Dendrite.

    :param shared_secret: The registration shared secret configured in Dendrite.
    :param username: Local part of the user to register.
    :param password: Password for the new user.
    :param admin: Whether the user should be an admin.
    :return: Hex digest of the HMAC-SHA1 over ``user\\x00pass\\x00adminflag``.
    """
    # From: https://github.com/matrix-org/dendrite/blob/master/clientapi/routing/register.go
    admin_flag = b"admin" if admin else b"notadmin"
    message = b"\x00".join([
        username.encode('utf8'),
        password.encode('utf8'),
        admin_flag,
    ])
    digest = hmac.new(key=shared_secret.encode('utf8'), msg=message, digestmod=hashlib.sha1)
    return digest.hexdigest()
|
||||
|
||||
|
||||
def get_matrix_configuration() -> Optional[Dict]:
    """
    Return Matrix configuration.

    Requires Django support currently.

    :raises AttributeError: if no ``matrix_config_function`` is configured.
    """
    try:
        config_function = get_function_from_config("matrix_config_function")
    except AttributeError:
        raise AttributeError("Not configured for Matrix support")
    return config_function()
|
||||
|
||||
|
||||
def register_dendrite_user(username: str) -> Dict:
    """
    Shared secret registration for Dendrite.

    Note uses the legacy route, see
    https://github.com/matrix-org/dendrite/issues/1669

    Currently compatible with Django apps only.

    :param username: Local part of the Matrix ID to register.
    :raises requests.HTTPError: if the homeserver rejects the registration.

    Returns:
        {
            'user_id': '@username:domain.tld',
            'access_token': 'randomaccesstoken',
            'home_server': 'domain.tld',
            'device_id': 'randomdevice'
        }
    """
    # BUG FIX: previously this assigned the function object itself
    # (missing call parentheses), so the subscriptions below raised
    # TypeError. The configuration must actually be fetched.
    matrix_config = get_matrix_configuration()

    # Random one-time password; callers only need the returned access token.
    password = str(uuid.uuid4())
    mac = generate_dendrite_mac(
        matrix_config["registration_shared_secret"],
        username,
        password,
        False,
    )

    # Register using shared secret
    response = requests.post(
        f"{matrix_config['homeserver_base_url']}/_matrix/client/api/v1/register?kind=user",
        json={
            "type": "org.matrix.login.shared_secret",
            "mac": mac,
            "password": password,
            "user": username,
            "admin": False,
        },
        headers={
            "Content-Type": "application/json",
        },
    )
    response.raise_for_status()
    return response.json()
|
|
@ -3,34 +3,39 @@ import datetime
|
|||
import logging
|
||||
import re
|
||||
import socket
|
||||
from typing import Optional
|
||||
from typing import Optional, Dict
|
||||
from urllib.parse import quote
|
||||
from uuid import uuid4
|
||||
|
||||
import requests
|
||||
from requests_cache import CachedSession, DO_NOT_CACHE
|
||||
from requests.exceptions import RequestException, HTTPError, SSLError
|
||||
from requests.exceptions import ConnectionError
|
||||
from requests.structures import CaseInsensitiveDict
|
||||
|
||||
from federation import __version__
|
||||
from federation.utils.django import get_requests_cache_backend
|
||||
|
||||
logger = logging.getLogger("federation")
|
||||
|
||||
USER_AGENT = "python/federation/%s" % __version__
|
||||
|
||||
session = CachedSession('fed_cache', backend=get_requests_cache_backend('fed_cache'))
|
||||
EXPIRATION = datetime.timedelta(hours=6)
|
||||
|
||||
def fetch_content_type(url: str) -> Optional[str]:
    """
    Fetch the HEAD of the remote url to determine the content type.

    :param url: Full url to query.
    :return: The ``Content-Type`` header value, or None on request failure.
    """
    try:
        # Use the module-level cached session (consistent with the rest of
        # this module) so repeated lookups don't re-hit the network.
        response = session.head(url, headers={'user-agent': USER_AGENT}, timeout=10)
    except RequestException as ex:
        logger.warning("fetch_content_type - %s when fetching url %s", ex, url)
    else:
        return response.headers.get('Content-Type')
|
||||
|
||||
|
||||
def fetch_document(url=None, host=None, path="/", timeout=10, raise_ssl_errors=True, extra_headers=None):
|
||||
def fetch_document(url=None, host=None, path="/", timeout=10, raise_ssl_errors=True, extra_headers=None, cache=True, **kwargs):
|
||||
"""Helper method to fetch remote document.
|
||||
|
||||
Must be given either the ``url`` or ``host``.
|
||||
|
@ -43,6 +48,7 @@ def fetch_document(url=None, host=None, path="/", timeout=10, raise_ssl_errors=T
|
|||
:arg timeout: Seconds to wait for response (defaults to 10)
|
||||
:arg raise_ssl_errors: Pass False if you want to try HTTP even for sites with SSL errors (default True)
|
||||
:arg extra_headers: Optional extra headers dictionary to add to requests
|
||||
:arg kwargs holds extra args passed to requests.get
|
||||
:returns: Tuple of document (str or None), status code (int or None) and error (an exception class instance or None)
|
||||
:raises ValueError: If neither url nor host are given as parameters
|
||||
"""
|
||||
|
@ -58,9 +64,11 @@ def fetch_document(url=None, host=None, path="/", timeout=10, raise_ssl_errors=T
|
|||
# Use url since it was given
|
||||
logger.debug("fetch_document: trying %s", url)
|
||||
try:
|
||||
response = requests.get(url, timeout=timeout, headers=headers)
|
||||
response = session.get(url, timeout=timeout, headers=headers,
|
||||
expire_after=EXPIRATION if cache else DO_NOT_CACHE, **kwargs)
|
||||
logger.debug("fetch_document: found document, code %s", response.status_code)
|
||||
response.raise_for_status()
|
||||
if not response.encoding: response.encoding = 'utf-8'
|
||||
return response.text, response.status_code, None
|
||||
except RequestException as ex:
|
||||
logger.debug("fetch_document: exception %s", ex)
|
||||
|
@ -71,7 +79,7 @@ def fetch_document(url=None, host=None, path="/", timeout=10, raise_ssl_errors=T
|
|||
url = "https://%s%s" % (host_string, path_string)
|
||||
logger.debug("fetch_document: trying %s", url)
|
||||
try:
|
||||
response = requests.get(url, timeout=timeout, headers=headers)
|
||||
response = session.get(url, timeout=timeout, headers=headers)
|
||||
logger.debug("fetch_document: found document, code %s", response.status_code)
|
||||
response.raise_for_status()
|
||||
return response.text, response.status_code, None
|
||||
|
@ -83,7 +91,7 @@ def fetch_document(url=None, host=None, path="/", timeout=10, raise_ssl_errors=T
|
|||
url = url.replace("https://", "http://")
|
||||
logger.debug("fetch_document: trying %s", url)
|
||||
try:
|
||||
response = requests.get(url, timeout=timeout, headers=headers)
|
||||
response = session.get(url, timeout=timeout, headers=headers)
|
||||
logger.debug("fetch_document: found document, code %s", response.status_code)
|
||||
response.raise_for_status()
|
||||
return response.text, response.status_code, None
|
||||
|
@ -107,6 +115,22 @@ def fetch_host_ip(host: str) -> str:
|
|||
return ip
|
||||
|
||||
|
||||
def fetch_file(url: str, timeout: int = 30, extra_headers: Dict = None) -> str:
    """
    Download a file to a temporary path and return that path.

    :param url: Full url of the file to download.
    :param timeout: Seconds to wait for the response (defaults to 30).
    :param extra_headers: Optional extra headers dictionary to add to the request.
    :return: Path of the downloaded temporary file. The caller is
        responsible for removing it.
    :raises requests.HTTPError: if the server responds with an error status.
    """
    # Local import to avoid touching the module-level import block.
    import tempfile

    headers = {'user-agent': USER_AGENT}
    if extra_headers:
        headers.update(extra_headers)
    response = session.get(url, timeout=timeout, headers=headers, stream=True)
    response.raise_for_status()
    # Use tempfile instead of a hand-built "/tmp/<uuid>" path: portable
    # across platforms, honors TMPDIR, and the file is created securely
    # (O_EXCL) rather than at a merely-unlikely-to-collide name.
    with tempfile.NamedTemporaryFile(delete=False) as f:
        for chunk in response.iter_content(chunk_size=8192):
            f.write(chunk)
    return f.name
|
||||
|
||||
|
||||
def parse_http_date(date):
|
||||
"""
|
||||
Parse a date format as specified by HTTP RFC7231 section 7.1.1.1.
|
||||
|
@ -157,7 +181,7 @@ def parse_http_date(date):
|
|||
raise ValueError("%r is not a valid date" % date) from exc
|
||||
|
||||
|
||||
def send_document(url, data, timeout=10, *args, **kwargs):
|
||||
def send_document(url, data, timeout=10, method="post", *args, **kwargs):
|
||||
"""Helper method to send a document via POST.
|
||||
|
||||
Additional ``*args`` and ``**kwargs`` will be passed on to ``requests.post``.
|
||||
|
@ -165,9 +189,12 @@ def send_document(url, data, timeout=10, *args, **kwargs):
|
|||
:arg url: Full url to send to, including protocol
|
||||
:arg data: Dictionary (will be form-encoded), bytes, or file-like object to send in the body
|
||||
:arg timeout: Seconds to wait for response (defaults to 10)
|
||||
:arg method: Method to use, defaults to post
|
||||
:returns: Tuple of status code (int or None) and error (exception class instance or None)
|
||||
"""
|
||||
logger.debug("send_document: url=%s, data=%s, timeout=%s", url, data, timeout)
|
||||
logger.debug("send_document: url=%s, data=%s, timeout=%s, method=%s", url, data, timeout, method)
|
||||
if not method:
|
||||
method = "post"
|
||||
headers = CaseInsensitiveDict({
|
||||
'User-Agent': USER_AGENT,
|
||||
})
|
||||
|
@ -177,10 +204,12 @@ def send_document(url, data, timeout=10, *args, **kwargs):
|
|||
kwargs.update({
|
||||
"data": data, "timeout": timeout, "headers": headers
|
||||
})
|
||||
request_func = getattr(requests, method)
|
||||
try:
|
||||
response = requests.post(url, *args, **kwargs)
|
||||
response = request_func(url, *args, **kwargs)
|
||||
logger.debug("send_document: response status code %s", response.status_code)
|
||||
return response.status_code, None
|
||||
# TODO support rate limit 429 code
|
||||
except RequestException as ex:
|
||||
logger.debug("send_document: exception %s", ex)
|
||||
return None, ex
|
||||
|
@ -192,7 +221,7 @@ def try_retrieve_webfinger_document(handle: str) -> Optional[str]:
|
|||
"""
|
||||
try:
|
||||
host = handle.split("@")[1]
|
||||
except AttributeError:
|
||||
except (AttributeError, IndexError):
|
||||
logger.warning("retrieve_webfinger_document: invalid handle given: %s", handle)
|
||||
return None
|
||||
document, code, exception = fetch_document(
|
||||
|
|
|
@ -1,12 +1,19 @@
|
|||
import re
|
||||
from typing import Set, Tuple
|
||||
from typing import Set, List
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import bleach
|
||||
from bleach import callbacks
|
||||
from bs4 import BeautifulSoup
|
||||
from bs4.element import NavigableString
|
||||
from commonmark import commonmark
|
||||
|
||||
ILLEGAL_TAG_CHARS = "!#$%^&*+.,@£/()=?`'\\{[]}~;:\"’”—\xa0"
|
||||
|
||||
TAG_PATTERN = re.compile(r'(#[\w\-]+)([)\]_!?*%/.,;\s]+\s*|\Z)', re.UNICODE)
|
||||
# This will match non-matching braces. I don't think it's an issue.
|
||||
MENTION_PATTERN = re.compile(r'(@\{?(?:[\w\-. \u0250-\U0001f64f]*; *)?[\w]+@[\w\-.]+\.[\w]+}?)', re.UNICODE)
|
||||
# based on https://stackoverflow.com/a/6041965
|
||||
URL_PATTERN = re.compile(r'((?:(?:https?|ftp)://|^|(?<=[("<\s]))+(?:[\w\-]+(?:(?:\.[\w\-]+)+))'
|
||||
r'[\w.,;:@?!$()*^=%&/~+\-#]*(?<![:;,).>"]))',
|
||||
re.UNICODE)
|
||||
|
||||
def decode_if_bytes(text):
|
||||
try:
|
||||
|
@ -22,67 +29,38 @@ def encode_if_text(text):
|
|||
return text
|
||||
|
||||
|
||||
def find_tags(text: str) -> Set[str]:
    """Find tags in text.

    Ignore tags inside code blocks.

    :param text: Markdown/HTML content to scan.
    :return: Set of lower-cased tag names without the leading ``#``.
    """
    # Render first so code blocks become <code> elements, which
    # find_elements skips.
    rendered = commonmark(text, ignore_html_blocks=True)
    tags = find_elements(BeautifulSoup(rendered, 'html.parser'), TAG_PATTERN)
    # Set comprehension instead of set([...]) - avoids a throwaway list.
    return {tag.text.lstrip('#').lower() for tag in tags}
|
||||
|
||||
|
||||
def find_elements(soup: BeautifulSoup, pattern: re.Pattern) -> List[NavigableString]:
    """
    Split a BeautifulSoup tree's strings according to a pattern, replacing each
    element with a NavigableString. The returned list can be used to linkify
    the found elements.

    :param soup: BeautifulSoup instance of the content being searched
    :param pattern: Compiled regular expression defined using a single group
    :return: A NavigableString list attached to the original soup
    """
    matched = []
    for text_node in soup.find_all(string=True):
        # Leave anything inside code blocks untouched.
        if text_node.parent.name == 'code':
            continue
        pieces = [NavigableString(part) for part in pattern.split(text_node.text) if part]
        hits = [piece for piece in pieces if pattern.match(piece.text)]
        if not hits:
            continue
        # Splice the split pieces back into the soup in place of the original
        # string, so the returned nodes stay attached to the tree.
        text_node.replace_with(*pieces)
        matched.extend(hits)
    return matched
|
||||
|
||||
|
||||
def get_path_from_url(url: str) -> str:
|
||||
|
@ -93,28 +71,6 @@ def get_path_from_url(url: str) -> str:
|
|||
return parsed.path
|
||||
|
||||
|
||||
def process_text_links(text):
    """Process links in text, adding some attributes and linkifying textual links.

    :param text: HTML/text content to linkify.
    :return: Content with links processed; ``nofollow`` and ``target=_blank``
        added to external links, internal (``/``-prefixed) links left as is.
    """
    link_callbacks = [callbacks.nofollow, callbacks.target_blank]

    def link_attributes(attrs, new=False):
        """Run standard callbacks except for internal links."""
        href_key = (None, "href")
        # Guard against anchors without an href: attrs.get() would return
        # None and .startswith() would raise AttributeError.
        if (attrs.get(href_key) or "").startswith("/"):
            return attrs

        # Run the standard callbacks
        for callback in link_callbacks:
            attrs = callback(attrs, new)
        return attrs

    return bleach.linkify(
        text,
        callbacks=[link_attributes],
        parse_email=False,
        skip_tags=["code"],
    )
|
||||
|
||||
|
||||
def test_tag(tag: str) -> bool:
|
||||
"""Test a word whether it could be accepted as a tag."""
|
||||
|
|
25
setup.py
25
setup.py
|
@ -6,13 +6,11 @@ from setuptools import setup, find_packages
|
|||
from federation import __version__
|
||||
|
||||
|
||||
description = 'Python library to abstract social web federation protocols like ActivityPub and Diaspora.'
|
||||
description = 'Python library to abstract social web federation protocols like ActivityPub, Matrix and Diaspora.'
|
||||
|
||||
|
||||
def get_long_description():
|
||||
return open(os.path.join(os.path.dirname(__file__), "docs", "introduction.rst")).read()
|
||||
|
||||
|
||||
setup(
|
||||
name='federation',
|
||||
version=__version__,
|
||||
|
@ -22,26 +20,35 @@ setup(
|
|||
author_email='mail@jasonrobinson.me',
|
||||
maintainer='Jason Robinson',
|
||||
maintainer_email='mail@jasonrobinson.me',
|
||||
url='https://git.feneas.org/jaywink/federation',
|
||||
url='https://gitlab.com/jaywink/federation',
|
||||
download_url='https://pypi.org/project/federation/',
|
||||
packages=find_packages(),
|
||||
license="BSD 3-clause",
|
||||
install_requires=[
|
||||
"attrs",
|
||||
"beautifulsoup4>=4.11.2",
|
||||
"bleach>3.0",
|
||||
"commonmark",
|
||||
"calamus",
|
||||
"commonmark_socialhome>=0.9.1.post2",
|
||||
"cryptography",
|
||||
"cssselect>=0.9.2",
|
||||
"dirty-validators>=0.3.0",
|
||||
"funcy",
|
||||
"lxml>=3.4.0",
|
||||
"iteration_utilities",
|
||||
"jsonschema>=2.0.0",
|
||||
"markdownify",
|
||||
"pycryptodome>=3.4.10",
|
||||
"python-dateutil>=2.4.0",
|
||||
"python-httpsig-socialhome",
|
||||
"python-magic",
|
||||
"python-slugify>=5.0.0",
|
||||
"python-xrd>=0.1",
|
||||
"pytz",
|
||||
"PyYAML",
|
||||
"redis",
|
||||
"requests>=2.8.0",
|
||||
"requests-http-signature-jaywink>=0.1.0.dev0",
|
||||
"requests-cache",
|
||||
],
|
||||
include_package_data=True,
|
||||
classifiers=[
|
||||
|
@ -51,13 +58,13 @@ setup(
|
|||
'License :: OSI Approved :: BSD License',
|
||||
'Programming Language :: Python',
|
||||
'Programming Language :: Python :: 3 :: Only',
|
||||
'Programming Language :: Python :: 3.6',
|
||||
'Programming Language :: Python :: 3.7',
|
||||
'Programming Language :: Python :: 3.8',
|
||||
'Programming Language :: Python :: 3.9',
|
||||
'Programming Language :: Python :: 3.10',
|
||||
'Programming Language :: Python :: Implementation :: CPython',
|
||||
'Topic :: Communications',
|
||||
'Topic :: Internet',
|
||||
'Topic :: Software Development :: Libraries :: Python Modules',
|
||||
],
|
||||
keywords='federation diaspora activitypub federate fediverse social',
|
||||
keywords='federation diaspora activitypub matrix protocols federate fediverse social',
|
||||
)
|
||||
|
|
2
tox.ini
2
tox.ini
|
@ -4,7 +4,7 @@
|
|||
# and then run "tox" from this directory.
|
||||
|
||||
[tox]
|
||||
envlist = py36
|
||||
envlist = py310
|
||||
|
||||
[testenv]
|
||||
usedevelop = True
|
||||
|
|
Ładowanie…
Reference in New Issue