diff --git a/activitypub.py b/activitypub.py
index 4d1c129..db4a72b 100644
--- a/activitypub.py
+++ b/activitypub.py
@@ -250,20 +250,46 @@ def follower_collection(domain, collection):
     if not User.get_by_id(domain):
         return f'User {domain} not found', 404
 
-    followers, before, after = common.fetch_followers(domain, collection)
+    # page
+    followers, new_before, new_after = common.fetch_followers(domain, collection)
+    items = []
+    for f in followers:
+        f_as2 = f.to_as2()
+        if f_as2:
+            items.append(f_as2)
+    page = {
+        'type': 'CollectionPage',
+        'partOf': request.base_url,
+        'items': items,
+    }
+    if new_before:
+        page['next'] = f'{request.base_url}?before={new_before}'
+    if new_after:
+        page['prev'] = f'{request.base_url}?after={new_after}'
+
+    if 'before' in request.args or 'after' in request.args:
+        page.update({
+            '@context': 'https://www.w3.org/ns/activitystreams',
+            'id': request.url,
+        })
+        logger.info(f'Returning {json_dumps(page, indent=2)}')
+        return page, {'Content-Type': as2.CONTENT_TYPE}
+
+    # collection
     domain_prop = Follower.dest if collection == 'followers' else Follower.src
-    query = Follower.query(
+    count = Follower.query(
         Follower.status == 'active',
         domain_prop == domain,
-    )
-    count = query.count()
-    ret = {
+    ).count()
+
+    collection = {
         '@context': 'https://www.w3.org/ns/activitystreams',
-        'summary': f"{domain}'s {collection}",
+        'id': request.base_url,
         'type': 'Collection',
+        'summary': f"{domain}'s {collection}",
         'totalItems': count,
-        'items': [f.to_as2() for f in followers],
+        'first': page,
     }
-    logger.info(f'Returning {json_dumps(ret, indent=2)}')
-    return ret, {'Content-Type': as2.CONTENT_TYPE}
+    logger.info(f'Returning {json_dumps(collection, indent=2)}')
+    return collection, {'Content-Type': as2.CONTENT_TYPE}
 
diff --git a/common.py b/common.py
index 0d2e002..b55dbdf 100644
--- a/common.py
+++ b/common.py
@@ -3,7 +3,7 @@
 """
 from base64 import b64encode
 import copy
-import datetime
+from datetime import timedelta, timezone
 from hashlib import sha256
 import itertools
 import logging
@@ -73,7 +73,7 @@ DOMAIN_BLOCKLIST = frozenset((
 
 _DEFAULT_SIGNATURE_USER = None
 
-CACHE_TIME = datetime.timedelta(seconds=10)
+CACHE_TIME = timedelta(seconds=10)
 PAGE_SIZE = 20
 
 
@@ -639,17 +639,21 @@ def fetch_page(query, model_class):
     # TODO: unify this with Bridgy's user page
     def get_paging_param(param):
         val = request.values.get(param)
-        try:
-            return util.parse_iso8601(val.replace(' ', '+')) if val else None
-        except BaseException:
-            error(f"Couldn't parse {param}, {val!r} as ISO8601")
+        if val:
+            try:
+                dt = util.parse_iso8601(val.replace(' ', '+'))
+            except BaseException as e:
+                error(f"Couldn't parse {param}, {val!r} as ISO8601: {e}")
+            if dt.tzinfo:
+                dt = dt.astimezone(timezone.utc).replace(tzinfo=None)
+            return dt
 
     before = get_paging_param('before')
     after = get_paging_param('after')
     if before and after:
         error("can't handle both before and after")
     elif after:
-        query = query.filter(model_class.updated > after).order(model_class.updated)
+        query = query.filter(model_class.updated >= after).order(model_class.updated)
     elif before:
         query = query.filter(model_class.updated < before).order(-model_class.updated)
     else:
diff --git a/tests/test_activitypub.py b/tests/test_activitypub.py
index 5d90249..21181b7 100644
--- a/tests/test_activitypub.py
+++ b/tests/test_activitypub.py
@@ -4,6 +4,7 @@ TODO: test error handling
 """
 import copy
+from datetime import datetime, timedelta
 from unittest.mock import ANY, call, patch
 
 from granary import as2
 
@@ -657,15 +658,18 @@ class ActivityPubTest(testutil.TestCase):
         self.assertEqual(200, resp.status_code)
         self.assertEqual({
             '@context': 'https://www.w3.org/ns/activitystreams',
-            'summary': "foo.com's followers",
+            'id': 'http://localhost/foo.com/followers',
             'type': 'Collection',
+            'summary': "foo.com's followers",
             'totalItems': 0,
-            'items': [],
+            'first': {
+                'type': 'CollectionPage',
+                'partOf': 'http://localhost/foo.com/followers',
+                'items': [],
+            },
         }, resp.json)
 
-    def test_followers_collection(self, *args):
-        User.get_or_create('foo.com')
-
+    def store_followers(self):
         Follower.get_or_create('foo.com', 'https://bar.com',
                                last_follow=json_dumps(FOLLOW_WITH_ACTOR))
         Follower.get_or_create('http://other/actor', 'foo.com')
@@ -673,14 +677,42 @@ class ActivityPubTest(testutil.TestCase):
                                last_follow=json_dumps(FOLLOW_WITH_ACTOR))
         Follower.get_or_create('foo.com', 'baj.com', status='inactive')
 
+    def test_followers_collection(self, *args):
+        User.get_or_create('foo.com')
+        self.store_followers()
+
         resp = self.client.get('/foo.com/followers')
         self.assertEqual(200, resp.status_code)
         self.assertEqual({
             '@context': 'https://www.w3.org/ns/activitystreams',
-            'summary': "foo.com's followers",
+            'id': 'http://localhost/foo.com/followers',
             'type': 'Collection',
+            'summary': "foo.com's followers",
             'totalItems': 2,
-            'items': [ACTOR, ACTOR],
+            'first': {
+                'type': 'CollectionPage',
+                'partOf': 'http://localhost/foo.com/followers',
+                'items': [ACTOR, ACTOR],
+            },
+        }, resp.json)
+
+    @patch('common.PAGE_SIZE', 1)
+    def test_followers_collection_page(self, *args):
+        User.get_or_create('foo.com')
+        self.store_followers()
+        before = (datetime.utcnow() + timedelta(seconds=1)).isoformat()
+        next = Follower.get_by_id('foo.com https://baz.com').updated.isoformat()
+
+        resp = self.client.get(f'/foo.com/followers?before={before}')
+        self.assertEqual(200, resp.status_code)
+        self.assertEqual({
+            '@context': 'https://www.w3.org/ns/activitystreams',
+            'id': f'http://localhost/foo.com/followers?before={before}',
+            'type': 'CollectionPage',
+            'partOf': 'http://localhost/foo.com/followers',
+            'next': f'http://localhost/foo.com/followers?before={next}',
+            'prev': f'http://localhost/foo.com/followers?after={before}',
+            'items': [ACTOR],
         }, resp.json)
 
     def test_following_collection_unknown_user(self, *args):
@@ -694,15 +726,18 @@ class ActivityPubTest(testutil.TestCase):
         self.assertEqual(200, resp.status_code)
         self.assertEqual({
             '@context': 'https://www.w3.org/ns/activitystreams',
+            'id': 'http://localhost/foo.com/following',
             'summary': "foo.com's following",
             'type': 'Collection',
             'totalItems': 0,
-            'items': [],
+            'first': {
+                'type': 'CollectionPage',
+                'partOf': 'http://localhost/foo.com/following',
+                'items': [],
+            },
         }, resp.json)
 
-    def test_following_collection(self, *args):
-        User.get_or_create('foo.com')
-
+    def store_following(self):
         Follower.get_or_create('https://bar.com', 'foo.com',
                                last_follow=json_dumps(FOLLOW_WITH_OBJECT))
         Follower.get_or_create('foo.com', 'http://other/actor')
@@ -710,12 +745,40 @@ class ActivityPubTest(testutil.TestCase):
                                last_follow=json_dumps(FOLLOW_WITH_OBJECT))
         Follower.get_or_create('baj.com', 'foo.com', status='inactive')
 
+    def test_following_collection(self, *args):
+        User.get_or_create('foo.com')
+        self.store_following()
+
         resp = self.client.get('/foo.com/following')
         self.assertEqual(200, resp.status_code)
         self.assertEqual({
             '@context': 'https://www.w3.org/ns/activitystreams',
+            'id': 'http://localhost/foo.com/following',
             'summary': "foo.com's following",
             'type': 'Collection',
             'totalItems': 2,
-            'items': [ACTOR, ACTOR],
+            'first': {
+                'type': 'CollectionPage',
+                'partOf': 'http://localhost/foo.com/following',
+                'items': [ACTOR, ACTOR],
+            },
+        }, resp.json)
+
+    @patch('common.PAGE_SIZE', 1)
+    def test_following_collection_page(self, *args):
+        User.get_or_create('foo.com')
+        self.store_following()
+        after = datetime(1900, 1, 1).isoformat()
+        prev = Follower.get_by_id('https://baz.com foo.com').updated.isoformat()
+
+        resp = self.client.get(f'/foo.com/following?after={after}')
+        self.assertEqual(200, resp.status_code)
+        self.assertEqual({
+            '@context': 'https://www.w3.org/ns/activitystreams',
+            'id': f'http://localhost/foo.com/following?after={after}',
+            'type': 'CollectionPage',
+            'partOf': 'http://localhost/foo.com/following',
+            'prev': f'http://localhost/foo.com/following?after={prev}',
+            'next': f'http://localhost/foo.com/following?before={after}',
+            'items': [ACTOR],
         }, resp.json)
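
For context, here is a minimal consumer-side sketch of how the paginated collections introduced above can be walked: the top-level Collection now embeds its first CollectionPage under 'first', and each page links onward via 'next' (a ?before=... URL). Everything in the sketch is an illustrative assumption rather than part of the patch: the fed.brid.gy URL, the requests dependency, and the fetch_all_followers helper are invented for the example.

import requests

# 'application/activity+json' is the standard ActivityStreams 2 media type;
# sending it as Accept is an assumption about how a client would call the endpoint.
AS2_CONTENT_TYPE = 'application/activity+json'


def fetch_all_followers(collection_url):
    """Yields every item by walking the embedded first page and its next links."""
    headers = {'Accept': AS2_CONTENT_TYPE}
    collection = requests.get(collection_url, headers=headers).json()

    # the Collection response embeds its first page inline; later pages are
    # separate CollectionPage responses reached through each page's 'next' link
    page = collection.get('first')
    while page:
        yield from page.get('items', [])
        next_url = page.get('next')
        if not next_url:
            break
        page = requests.get(next_url, headers=headers).json()


# usage sketch (hypothetical domain):
# for actor in fetch_all_followers('https://fed.brid.gy/foo.com/followers'):
#     print(actor.get('id'))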