2018-04-13 19:31:40 +00:00
|
|
|
import datetime
|
2018-04-12 17:57:43 +00:00
|
|
|
import logging
|
2018-05-06 13:36:49 +00:00
|
|
|
import os
|
2018-12-27 16:42:43 +00:00
|
|
|
import requests
|
2018-04-12 17:57:43 +00:00
|
|
|
|
|
|
|
from django.conf import settings
|
2018-09-06 18:35:02 +00:00
|
|
|
from django.db.models import Q, F
|
2018-04-12 18:38:06 +00:00
|
|
|
from django.utils import timezone
|
2018-04-13 19:31:40 +00:00
|
|
|
from dynamic_preferences.registries import global_preferences_registry
|
2018-06-10 08:55:16 +00:00
|
|
|
from requests.exceptions import RequestException
|
2018-04-12 16:41:43 +00:00
|
|
|
|
2018-09-28 19:19:37 +00:00
|
|
|
from funkwhale_api.common import preferences
|
2018-04-12 17:57:43 +00:00
|
|
|
from funkwhale_api.common import session
|
2018-09-06 18:35:02 +00:00
|
|
|
from funkwhale_api.music import models as music_models
|
2018-04-11 21:13:33 +00:00
|
|
|
from funkwhale_api.taskapp import celery
|
|
|
|
|
2019-01-11 10:04:11 +00:00
|
|
|
from . import keys
|
2018-06-10 08:55:16 +00:00
|
|
|
from . import models, signing
|
2018-12-27 16:42:43 +00:00
|
|
|
from . import serializers
|
2018-09-06 18:35:02 +00:00
|
|
|
from . import routes
|
2018-04-12 17:57:43 +00:00
|
|
|
|
|
|
|
# Module-level logger shared by every federation task below.
logger = logging.getLogger(__name__)
|
2018-06-09 13:36:16 +00:00
|
|
|
@celery.app.task(name="federation.clean_music_cache")
def clean_music_cache():
    """
    Remove locally-cached audio files from remote uploads that have not
    been accessed for longer than the configured cache duration, then
    delete any orphaned files left behind in the federation cache storage.
    """
    # NOTE: named ``prefs`` (not ``preferences``) so we don't shadow the
    # imported ``funkwhale_api.common.preferences`` module used elsewhere
    # in this file.
    prefs = global_preferences_registry.manager()
    delay = prefs["federation__music_cache_duration"]
    if delay < 1:
        return  # cache clearing disabled
    limit = timezone.now() - datetime.timedelta(minutes=delay)

    candidates = (
        music_models.Upload.objects.filter(
            Q(audio_file__isnull=False)
            & (Q(accessed_date__lt=limit) | Q(accessed_date=None)),
            # library__actor__user=None,
        )
        .local(False)
        .exclude(audio_file="")
        .only("audio_file", "id")
        .order_by("id")
    )
    for upload in candidates:
        upload.audio_file.delete()

    # we also delete orphaned files, if any
    storage = models.LibraryTrack._meta.get_field("audio_file").storage
    files = get_files(storage, "federation_cache/tracks")
    existing = music_models.Upload.objects.filter(audio_file__in=files)
    missing = set(files) - set(existing.values_list("audio_file", flat=True))
    for m in missing:
        storage.delete(m)
|
|
|
|
|
|
|
|
|
|
|
|
def get_files(storage, *parts):
|
|
|
|
"""
|
|
|
|
This is a recursive function that return all files available
|
|
|
|
in a given directory using django's storage.
|
|
|
|
"""
|
|
|
|
if not parts:
|
2018-06-09 13:36:16 +00:00
|
|
|
raise ValueError("Missing path")
|
2018-09-28 19:30:45 +00:00
|
|
|
try:
|
|
|
|
dirs, files = storage.listdir(os.path.join(*parts))
|
|
|
|
except FileNotFoundError:
|
|
|
|
return []
|
2018-05-06 13:36:49 +00:00
|
|
|
for dir in dirs:
|
|
|
|
files += get_files(storage, *(list(parts) + [dir]))
|
2018-06-09 13:36:16 +00:00
|
|
|
return [os.path.join(parts[-1], path) for path in files]
|
2018-09-06 18:35:02 +00:00
|
|
|
|
|
|
|
|
|
|
|
@celery.app.task(name="federation.dispatch_inbox")
@celery.require_instance(models.Activity.objects.select_related(), "activity")
def dispatch_inbox(activity):
    """
    Given an activity instance, triggers our internal delivery logic (follow
    creation, etc.)
    """
    # only unread inbox items still need processing
    unread_items = activity.inbox_items.filter(is_read=False)
    context = {
        "activity": activity,
        "actor": activity.actor,
        "inbox_items": unread_items,
    }
    routes.inbox.dispatch(activity.payload, context=context)
|
2018-09-06 18:35:02 +00:00
|
|
|
|
|
|
|
|
|
|
|
@celery.app.task(name="federation.dispatch_outbox")
@celery.require_instance(models.Activity.objects.select_related(), "activity")
def dispatch_outbox(activity):
    """
    Deliver a local activity to its recipients, both locally and remotely
    """
    unread = activity.inbox_items.filter(is_read=False).select_related()
    if unread.exists():
        # local recipients are handled asynchronously via the inbox task
        dispatch_inbox.delay(activity_id=activity.pk)

    if not preferences.get("federation__enabled"):
        # federation is disabled, we only deliver to local recipients
        return

    pending = activity.deliveries.filter(is_delivered=False)
    for delivery_id in pending.values_list("pk", flat=True):
        deliver_to_remote.delay(delivery_id=delivery_id)
|
2018-09-06 18:35:02 +00:00
|
|
|
|
|
|
|
|
|
|
|
@celery.app.task(
    name="federation.deliver_to_remote_inbox",
    # network failures are retried automatically with backoff
    autoretry_for=[RequestException],
    retry_backoff=30,
    max_retries=5,
)
@celery.require_instance(
    models.Delivery.objects.filter(is_delivered=False).select_related(
        "activity__actor"
    ),
    "delivery",
)
def deliver_to_remote(delivery):
    """
    POST a pending delivery's activity payload to its remote inbox, signing
    the request with the emitting actor's private key.

    The attempt counter and timestamp are recorded whether the request
    succeeds or fails; on failure the exception is re-raised so celery's
    autoretry (see task decorator above) can reschedule the task.
    """
    if not preferences.get("federation__enabled"):
        # federation is disabled, we only deliver to local recipients
        return

    actor = delivery.activity.actor
    logger.info("Preparing activity delivery to %s", delivery.inbox_url)
    # HTTP-signature auth derived from the actor's private key
    auth = signing.get_auth(actor.private_key, actor.private_key_id)
    try:
        response = session.get_session().post(
            auth=auth,
            json=delivery.activity.payload,
            url=delivery.inbox_url,
            timeout=5,
            verify=settings.EXTERNAL_REQUESTS_VERIFY_SSL,
            headers={"Content-Type": "application/activity+json"},
        )
        logger.debug("Remote answered with %s", response.status_code)
        response.raise_for_status()
    except Exception:
        # record the failed attempt (F() avoids a read-modify-write race),
        # then re-raise so celery can retry
        delivery.last_attempt_date = timezone.now()
        delivery.attempts = F("attempts") + 1
        delivery.save(update_fields=["last_attempt_date", "attempts"])
        raise
    else:
        # success: record the attempt and mark the delivery as done
        delivery.last_attempt_date = timezone.now()
        delivery.attempts = F("attempts") + 1
        delivery.is_delivered = True
        delivery.save(update_fields=["last_attempt_date", "attempts", "is_delivered"])
|
2018-12-27 16:42:43 +00:00
|
|
|
|
|
|
|
|
|
|
|
def fetch_nodeinfo(domain_name):
    """
    Fetch and validate the nodeinfo 2.0 document advertised by a domain.

    :param domain_name: the domain to query
    :returns: the nodeinfo payload as a dict
    :raises requests.RequestException: on network or HTTP errors
    :raises serializers.serializers.ValidationError: if the well-known
        document is invalid or advertises no nodeinfo 2.0 endpoint
    """
    s = session.get_session()
    wellknown_url = "https://{}/.well-known/nodeinfo".format(domain_name)
    response = s.get(
        url=wellknown_url, timeout=5, verify=settings.EXTERNAL_REQUESTS_VERIFY_SSL
    )
    response.raise_for_status()
    serializer = serializers.NodeInfoSerializer(data=response.json())
    serializer.is_valid(raise_exception=True)
    nodeinfo_url = None
    for link in serializer.validated_data["links"]:
        if link["rel"] == "http://nodeinfo.diaspora.software/ns/schema/2.0":
            nodeinfo_url = link["href"]
            break

    if nodeinfo_url is None:
        # previously this fell through to s.get(url=None), which failed with
        # an obscure requests error; raise a validation error instead so
        # callers (e.g. update_domain_nodeinfo) record a meaningful failure
        raise serializers.serializers.ValidationError(
            "No nodeinfo 2.0 link found for {}".format(domain_name)
        )

    response = s.get(
        url=nodeinfo_url, timeout=5, verify=settings.EXTERNAL_REQUESTS_VERIFY_SSL
    )
    response.raise_for_status()
    return response.json()
|
|
|
|
|
|
|
|
|
|
|
|
@celery.app.task(name="federation.update_domain_nodeinfo")
@celery.require_instance(
    models.Domain.objects.external(), "domain", id_kwarg_name="domain_name"
)
def update_domain_nodeinfo(domain):
    """
    Refresh the cached nodeinfo document of an external domain, storing
    either the fetched payload or the error that occurred.
    """
    fetch_date = timezone.now()
    try:
        payload = fetch_nodeinfo(domain.name)
    except (requests.RequestException, serializers.serializers.ValidationError) as e:
        result = {"status": "error", "error": str(e)}
    else:
        result = {"status": "ok", "payload": payload}
    domain.nodeinfo_fetch_date = fetch_date
    domain.nodeinfo = result
    domain.save(update_fields=["nodeinfo", "nodeinfo_fetch_date"])
|
2019-01-09 13:18:32 +00:00
|
|
|
|
|
|
|
|
|
|
|
def delete_qs(qs):
    """
    Delete every object in the given queryset, logging how many objects
    (and related entities) were removed.
    """
    model_label = qs.model._meta.label
    # qs.delete() returns (total deleted, {label: count, ...})
    deleted_total, per_model_counts = qs.delete()
    related_total = sum(per_model_counts.values())

    logger.info(
        "Purged %s %s objects (and %s related entities)",
        deleted_total,
        model_label,
        related_total,
    )
|
|
|
|
|
|
|
|
|
2019-01-10 10:02:09 +00:00
|
|
|
def handle_purge_actors(ids, only=None):
    """
    Purge data belonging to the given actor ids.

    Empty ``only`` means we purge everything.
    Otherwise, we purge only the requested bits: media.

    :param ids: actor primary keys whose data should be purged
    :param only: optional list restricting what is purged (e.g. ["media"])
    """
    # None default instead of a shared mutable list (classic python
    # pitfall); falsy values keep the original "purge everything" semantics
    only = only or []
    # purge follows (received emitted)
    if not only:
        delete_qs(models.LibraryFollow.objects.filter(target__actor_id__in=ids))
        delete_qs(models.Follow.objects.filter(actor_id__in=ids))

    # purge audio content
    if not only or "media" in only:
        delete_qs(models.LibraryFollow.objects.filter(actor_id__in=ids))
        delete_qs(models.Follow.objects.filter(target_id__in=ids))
        delete_qs(music_models.Upload.objects.filter(library__actor_id__in=ids))
        delete_qs(music_models.Library.objects.filter(actor_id__in=ids))

    # purge remaining activities / deliveries
    if not only:
        delete_qs(models.InboxItem.objects.filter(actor_id__in=ids))
        delete_qs(models.Activity.objects.filter(actor_id__in=ids))
|
2019-01-09 13:18:32 +00:00
|
|
|
|
|
|
|
|
|
|
|
@celery.app.task(name="federation.purge_actors")
def purge_actors(ids=None, domains=None, only=None):
    """
    Purge data for all actors matching the given ids and/or domains.

    :param ids: actor primary keys to purge
    :param domains: domain ids whose actors should be purged
    :param only: optional list restricting what is purged
        (see :func:`handle_purge_actors`)
    """
    # None defaults avoid sharing mutable list instances between calls
    ids = ids or []
    domains = domains or []
    only = only or []
    actors = models.Actor.objects.filter(
        Q(id__in=ids) | Q(domain_id__in=domains)
    ).order_by("id")
    found_ids = list(actors.values_list("id", flat=True))
    logger.info("Starting purging %s accounts", len(found_ids))
    handle_purge_actors(ids=found_ids, only=only)
|
2019-01-11 10:04:11 +00:00
|
|
|
|
|
|
|
|
|
|
|
@celery.app.task(name="federation.rotate_actor_key")
@celery.require_instance(models.Actor.objects.local(), "actor")
def rotate_actor_key(actor):
    """
    Generate a fresh key pair for a local actor and persist it.
    """
    private, public = keys.get_key_pair()
    actor.private_key = private.decode()
    actor.public_key = public.decode()
    actor.save(update_fields=["private_key", "public_key"])
|