2017-11-27 13:19:21 +00:00
|
|
|
import pytest
|
2019-04-28 11:47:43 +00:00
|
|
|
import time
|
2022-11-17 18:50:23 +00:00
|
|
|
from mastodon.Mastodon import MastodonAPIError, MastodonIllegalArgumentError, MastodonUnauthorizedError
|
|
|
|
import datetime
|
2022-11-17 20:00:37 +00:00
|
|
|
import pickle
|
|
|
|
import os
|
2017-11-27 13:19:21 +00:00
|
|
|
|
|
|
|
@pytest.mark.vcr()
def test_public_tl_anonymous(api_anonymous, status3):
    """An existing status is visible on the public timeline without auth."""
    # Give the instance a moment to ingest the status before fetching.
    time.sleep(3)
    public_timeline = api_anonymous.timeline_public()
    seen_ids = [entry['id'] for entry in public_timeline]
    assert status3['id'] in seen_ids
|
2017-11-27 13:19:21 +00:00
|
|
|
|
|
|
|
@pytest.mark.vcr()
def test_public_tl(api, status):
    """A posted status shows up on both the public and the local timeline."""
    public_ids = [entry['id'] for entry in api.timeline_public()]
    local_ids = [entry['id'] for entry in api.timeline_local()]
    assert status['id'] in public_ids
    assert status['id'] in local_ids
|
2017-11-27 13:19:21 +00:00
|
|
|
|
2018-01-03 10:44:14 +00:00
|
|
|
@pytest.mark.vcr()
def test_unauthed_home_tl_throws(api_anonymous, status):
    """Fetching the home timeline without credentials is rejected."""
    with pytest.raises(MastodonUnauthorizedError):
        api_anonymous.timeline_home()
|
|
|
|
|
2017-11-27 13:19:21 +00:00
|
|
|
@pytest.mark.vcr()
def test_home_tl(api, status):
    """A freshly posted status appears on the poster's own home timeline."""
    time.sleep(3)  # allow the instance to process the new status
    home_ids = [entry['id'] for entry in api.timeline_home()]
    assert status['id'] in home_ids
|
|
|
|
|
|
|
|
@pytest.mark.vcr()
def test_hashtag_tl(api):
    """A status tagged #hoot appears on the matching hashtag timeline."""
    status = api.status_post('#hoot (hashtag toot)')
    tl = api.timeline_hashtag('hoot')
    try:
        assert status['id'] in [entry['id'] for entry in tl]
    finally:
        # Always remove the status we created, even if the assertion fails.
        api.status_delete(status['id'])
|
2017-11-27 13:35:56 +00:00
|
|
|
|
2017-11-29 21:34:13 +00:00
|
|
|
def test_hashtag_tl_leading_hash(api):
    """A tag passed with a leading '#' is rejected (no VCR needed: raised client-side)."""
    with pytest.raises(MastodonIllegalArgumentError):
        api.timeline_hashtag('#hoot')
|
|
|
|
|
2017-11-27 13:35:56 +00:00
|
|
|
@pytest.mark.vcr()
def test_home_tl_anonymous_throws(api_anonymous):
    """The home timeline endpoint requires authentication."""
    with pytest.raises(MastodonAPIError):
        api_anonymous.timeline_home()
|
2019-04-28 11:47:43 +00:00
|
|
|
|
|
|
|
@pytest.mark.vcr()
def test_conversations(api, api2):
    """A direct status creates a conversation for the recipient and can be marked read."""
    sender = api.account_verify_credentials()
    dm = api.status_post("@admin ilu bby ;3", visibility="direct")
    time.sleep(2)

    # First fetch: the conversation should exist and be unread.
    unread_convos = api2.conversations()
    api2.conversations_read(unread_convos[0])
    time.sleep(2)
    # Second fetch: the same conversation should now be read.
    read_convos = api2.conversations()

    # Clean up before asserting so a failed assert doesn't leak the DM.
    api.status_delete(dm)

    assert unread_convos
    assert dm.id in [convo.last_status.id for convo in unread_convos]
    assert sender.id in [convo.accounts[0].id for convo in unread_convos]
    assert unread_convos[0].unread is True
    assert read_convos[0].unread is False
|
2022-11-17 18:50:23 +00:00
|
|
|
|
|
|
|
@pytest.mark.vcr()
def test_min_max_id(api, status):
    """min_id / max_id / since_id windows include or exclude the status as expected."""
    time.sleep(3)
    sid = status.id

    def home_ids(**range_kwargs):
        # Fetch the home timeline with the given range parameters, collect ids.
        return [entry['id'] for entry in api.timeline_home(**range_kwargs)]

    # Window straddling the status id: status must be present.
    assert sid in home_ids(min_id=sid - 1000, max_id=sid + 1000)
    # Window entirely below the status id: must be absent.
    assert sid not in home_ids(min_id=sid - 2000, max_id=sid - 1000)
    # Window entirely above the status id: must be absent.
    assert sid not in home_ids(min_id=sid + 1000, max_id=sid + 2000)
    # Open-ended since_id below the status id: must be present.
    assert sid in home_ids(since_id=sid - 1000)
|
|
|
|
|
|
|
|
@pytest.mark.vcr()
def test_min_max_id_datetimes(api, status):
    """min_id / max_id also accept datetime objects as range boundaries.

    The boundary datetimes are pickled next to the VCR cassette on the first
    (recording) run so that replayed runs use exactly the same values that
    were recorded — otherwise the request parameters would not match the
    cassette.
    """
    # Fix: the original opened these pickle files with bare open() and never
    # closed them; use context managers so the handles are always released.
    pickle_path = "tests/cassettes/test_min_max_id_datetimes_datetimeobjects.pkl"
    if os.path.exists(pickle_path):
        # Replay run: restore the timestamps recorded with the cassette.
        with open(pickle_path, 'rb') as handle:
            data_dict = pickle.load(handle)
        the_past = datetime.datetime.fromtimestamp(data_dict["the_past"])
        the_future = datetime.datetime.fromtimestamp(data_dict["the_future"])
        the_far_future = datetime.datetime.fromtimestamp(data_dict["the_far_future"])
    else:
        # Recording run: derive boundaries from the current time (round-tripped
        # through a POSIX timestamp, matching the precision of the replay path)
        # and persist them for future replays.
        epoch_time = datetime.datetime.now().timestamp()
        now = datetime.datetime.fromtimestamp(epoch_time)
        the_past = now - datetime.timedelta(seconds=20)
        the_future = now + datetime.timedelta(seconds=20)
        the_far_future = now + datetime.timedelta(seconds=40)
        with open(pickle_path, 'wb') as handle:
            pickle.dump({
                "the_past": the_past.timestamp(),
                "the_future": the_future.timestamp(),
                "the_far_future": the_far_future.timestamp(),
            }, handle)

    time.sleep(3)
    # Window [past, future] straddles "now", so the fresh status is included.
    tl = api.timeline_home(min_id=the_past, max_id=the_future)
    assert status['id'] in [entry['id'] for entry in tl]
    # Window [future, far future] lies entirely after the status was posted.
    tl = api.timeline_home(min_id=the_future, max_id=the_far_future)
    assert status['id'] not in [entry['id'] for entry in tl]
|