"""app.bsky.feed.* XRPC methods."""
import json
import logging
import re

from flask import g
from granary import bluesky, microformats2
import mf2util
from oauth_dropins.webutil import util

from flask_app import xrpc_server
from models import Object, PAGE_SIZE, User
from web import Web

logger = logging.getLogger(__name__)


@xrpc_server.method('app.bsky.feed.getAuthorFeed')
def getAuthorFeed(input, author=None, limit=None, before=None):
    """
    lexicons/app/bsky/feed/getAuthorFeed.json, feedViewPost.json
    """
    if not author or not re.match(util.DOMAIN_RE, author):
        raise ValueError(f'{author} is not a domain')

    g.user = Web.get_by_id(author)
    if not g.user:
        raise ValueError(f'User {author} not found')
    elif not g.user.actor_as2:
        raise ValueError(f'User {author} not fully set up')

    # TODO: unify with pages.feed?
    limit = min(limit or PAGE_SIZE, PAGE_SIZE)
    objects, _, _ = Object.query(Object.domains == author, Object.labels == 'user') \
        .order(-Object.created) \
        .fetch_page(limit)
    activities = [obj.as1 for obj in objects if not obj.deleted]
    logger.info(f'AS1 activities: {json.dumps(activities, indent=2)}')

    return {'feed': [bluesky.from_as1(a) for a in activities]}
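

# Example request, for illustration only. Assuming xrpc_server is a lexrpc
# Server wired into Flask via lexrpc's flask_server integration, each
# registered method is served over HTTP at /xrpc/<NSID>, with the lexicon
# parameters as query parameters, so a call to this handler looks roughly
# like (hypothetical values):
#
#   GET /xrpc/app.bsky.feed.getAuthorFeed?author=user.com&limit=30
#
# and the JSON response body is the {'feed': [...]} dict returned above.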


@xrpc_server.method('app.bsky.feed.getPostThread')
def getPostThread(input, uri=None, depth=None):
    """
    lexicons/app/bsky/feed/getPostThread.json
    """
    if not uri:
        raise ValueError('Missing uri')

    obj = Object.get_by_id(uri)
    if not obj:
        raise ValueError(f'{uri} not found')

    logger.info(f'AS1: {json.dumps(obj.as1, indent=2)}')

    return {
        'thread': {
            '$type': 'app.bsky.feed.defs#threadViewPost',
            'post': bluesky.from_as1(obj.as1)['post'],
            'replies': [{
                '$type': 'app.bsky.feed.defs#threadViewPost',
                'post': bluesky.from_as1(reply)['post'],
            } for reply in obj.as1.get('replies', {}).get('items', [])],
        },
    }
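

# The handler above expands only one level of replies and ignores the `depth`
# parameter. The helper below is an illustrative sketch (hypothetical, not
# called anywhere) of how `depth` could be honored, assuming nested replies
# are available as AS1 objects under 'replies' -> 'items' just like the
# top-level post:
def _thread_view(as1, depth=None):
    """Recursively builds an app.bsky.feed.defs#threadViewPost dict from AS1."""
    view = {
        '$type': 'app.bsky.feed.defs#threadViewPost',
        'post': bluesky.from_as1(as1)['post'],
    }
    if depth is None or depth > 0:
        next_depth = None if depth is None else depth - 1
        view['replies'] = [_thread_view(reply, next_depth)
                           for reply in as1.get('replies', {}).get('items', [])]
    return view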


@xrpc_server.method('app.bsky.feed.getRepostedBy')
def getRepostedBy(input, uri=None, cid=None, limit=None, before=None):
    """
    TODO: implement before as a query filter (see sketch below). What's its
    input type, str or datetime?

    lexicons/app/bsky/feed/getRepostedBy.json
    """
    if not uri:
        raise ValueError('Missing uri')

    limit = min(limit or PAGE_SIZE, PAGE_SIZE)
    objects, _, _ = Object.query(Object.object_ids == uri) \
        .order(-Object.created) \
        .fetch_page(limit)
    activities = [obj.as1 for obj in objects if not obj.deleted]
    logger.info(f'AS1 activities: {json.dumps(activities, indent=2)}')

    return {
        'uri': uri,
        'repostedBy': [{
            **bluesky.from_as1(a['actor']),
            '$type': 'app.bsky.feed.getRepostedBy#repostedBy',
        } for a in activities if a.get('actor')],
    }
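

# Illustrative sketch for the `before` TODO above (hypothetical helper, not
# wired into the handler). It assumes Object.created is an ndb
# DateTimeProperty and that `before` arrives either as a datetime or as an
# ISO-8601 string; ndb allows the inequality filter here because the query
# already sorts on created first.
def _filter_before(query, before=None):
    """Narrows an Object query to entities created before `before`, if given."""
    from datetime import datetime
    if isinstance(before, str):
        before = datetime.fromisoformat(before)
    if before:
        query = query.filter(Object.created < before)
    return query
# e.g. objects, _, _ = _filter_before(Object.query(Object.object_ids == uri),
#                                     before).order(-Object.created).fetch_page(limit)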


# TODO: cursor (see sketch below)
@xrpc_server.method('app.bsky.feed.getTimeline')
def getTimeline(input, algorithm=None, limit=50, before=None):
    """
    lexicons/app/bsky/feed/getTimeline.json
    """
    # TODO: how to get the authed user? hard-coded placeholder for now
    domain = 'user.com'

    # TODO: de-dupe with pages.feed()
    logger.info(f'Fetching {limit} objects for {domain}')
    objects, _, _ = Object.query(Object.domains == domain, Object.labels == 'feed') \
        .order(-Object.created) \
        .fetch_page(limit)

    return {'feed': [bluesky.from_as1(obj.as1) for obj in objects if not obj.deleted]}
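

# Illustrative sketch for the cursor TODO above (hypothetical helper, not
# wired into the handler). ndb's fetch_page() already returns a cursor and a
# more-results flag, which getTimeline currently discards; a cursor-aware
# version could pass the client's cursor back in and return the next one,
# roughly like this (assumes google.cloud.ndb and that the client echoes the
# opaque cursor string back on the next call):
def _timeline_page(domain, limit, cursor=None):
    """Returns (objects, next_cursor) for one page of a user's feed objects."""
    from google.cloud.ndb import Cursor
    start = Cursor(urlsafe=cursor) if cursor else None
    objects, next_cursor, more = \
        Object.query(Object.domains == domain, Object.labels == 'feed') \
            .order(-Object.created) \
            .fetch_page(limit, start_cursor=start)
    return objects, (next_cursor.urlsafe().decode() if more and next_cursor else None)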


# TODO: implement; stub for now (see sketch below)
@xrpc_server.method('app.bsky.feed.getLikes')
def getLikes(input, uri=None, direction=None, cid=None, limit=None, before=None):
    """
    lexicons/app/bsky/feed/getLikes.json
    """
    return {
        'uri': uri,
        'likes': [],
    }
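

# The stub above always returns an empty list. An illustrative sketch of a
# fuller version (hypothetical helper, not wired in; the exact output item
# shape depends on the getLikes lexicon) could mirror getRepostedBy: query
# stored objects that target `uri`, keep the ones that look like AS1 'like'
# activities with an actor, and convert those actors with bluesky.from_as1():
def _likers(uri, limit=PAGE_SIZE):
    """Returns converted actor views for stored likes of `uri`."""
    objects, _, _ = Object.query(Object.object_ids == uri) \
        .order(-Object.created) \
        .fetch_page(min(limit or PAGE_SIZE, PAGE_SIZE))
    return [bluesky.from_as1(obj.as1['actor']) for obj in objects
            if not obj.deleted and obj.as1.get('verb') == 'like'
            and obj.as1.get('actor')]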