fix(back): update denormalization table on upload update and trigger denormalization on bulk update of uploads

environments/review-docs-2422-qjbh7w/deployments/21515
Petitminion 2025-10-07 11:45:35 +02:00 zatwierdzone przez petitminion
rodzic d2c5de06d4
commit c0e518646a
2 zmienionych plików z 11 dodań i 2 usunięć

Wyświetl plik

@ -1567,7 +1567,13 @@ def update_batch_status(sender, instance, **kwargs):
@receiver(post_save, sender=Upload)
def update_denormalization_track_actor(sender, instance, created, **kwargs):
if (
created
(
created
or (
kwargs.get("update_fields", None)
and "library" in kwargs.get("update_fields")
)
)
and settings.MUSIC_USE_DENORMALIZATION
and instance.track_id
and instance.import_status == "finished"

Wyświetl plik

@ -569,8 +569,11 @@ class UploadBulkUpdateListSerializer(serializers.ListSerializer):
f"Upload with uuid {uuid} does not exist"
)
upload.library = privacy_level_map[data["privacy_level"]]
# bulk_update skips the post-save signal (raw SQL db query), so we need to update the denormalization table ourselves.
# We could optimize with a bulk denormalization table update; in the meantime we do it one by one.
upload.save(update_fields=["library"])
objs.append(upload)
models.Upload.objects.bulk_update(objs, ["library"])
return objs