refactor: upgrade code to >=python3.7 (pre-commit)

Part-of: <https://dev.funkwhale.audio/funkwhale/funkwhale/-/merge_requests/2189>
jo 2022-11-23 22:36:56 +01:00, approved by Marge
parent 7768ea77a4
commit 8d9946d35a
143 changed files with 454 additions and 582 deletions
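The commit title credits pre-commit with the rewrite. The exact hooks pinned by this merge request are not visible on this page, but the changes throughout the diff (f-strings instead of .format() and %, classes that no longer inherit explicitly from object, argument-free super() calls, dropped __future__ imports) match what the pyupgrade hook produces for a Python 3.7+ target, and the removed coding pragma lines match fix-encoding-pragma with --remove from pre-commit-hooks. A minimal, hypothetical .pre-commit-config.yaml sketch that would yield this kind of change:

    repos:
      - repo: https://github.com/asottile/pyupgrade
        rev: v3.2.2  # hypothetical pin, pick the current release
        hooks:
          - id: pyupgrade
            args: [--py37-plus]
      - repo: https://github.com/pre-commit/pre-commit-hooks
        rev: v4.3.0  # hypothetical pin
        hooks:
          - id: fix-encoding-pragma
            args: [--remove]

With such a configuration, pre-commit run --all-files applies the >=3.7 rewrites to the whole tree in one pass, which is consistent with the 143 files touched by this commit.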

View file

@@ -28,7 +28,7 @@ _filters = {}
 _hooks = {}
-class PluginCache(object):
+class PluginCache:
 def __init__(self, prefix):
 self.prefix = prefix
@@ -81,7 +81,7 @@ def load_settings(name, settings):
 "text": django_settings.ENV,
 }
 values = {}
-prefix = "FUNKWHALE_PLUGIN_{}".format(name.upper())
+prefix = f"FUNKWHALE_PLUGIN_{name.upper()}"
 for s in settings:
 key = "_".join([prefix, s["name"].upper()])
 value = mapping[s["type"]](key, default=s.get("default", None))
@@ -262,7 +262,7 @@ def get_serializer_from_conf_template(conf, source=False, user=None):
 self.fields["library"] = LibraryField(actor=user.actor)
 for vname, v in validators.items():
-setattr(Serializer, "validate_{}".format(vname), v)
+setattr(Serializer, f"validate_{vname}", v)
 return Serializer

View file

@@ -1 +0,0 @@
-# -*- coding: utf-8 -*-

View file

@@ -1,6 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import, unicode_literals
 import logging.config
 import sys
 from collections import OrderedDict
@@ -170,7 +167,7 @@ else:
 FUNKWHALE_PROTOCOL = FUNKWHALE_PROTOCOL.lower()
 FUNKWHALE_HOSTNAME = FUNKWHALE_HOSTNAME.lower()
-FUNKWHALE_URL = "{}://{}".format(FUNKWHALE_PROTOCOL, FUNKWHALE_HOSTNAME)
+FUNKWHALE_URL = f"{FUNKWHALE_PROTOCOL}://{FUNKWHALE_HOSTNAME}"
 FUNKWHALE_SPA_HTML_ROOT = env(
 "FUNKWHALE_SPA_HTML_ROOT", default=FUNKWHALE_URL + "/front/"
 )
@@ -336,7 +333,7 @@ FIXTURE_DIRS = (str(APPS_DIR.path("fixtures")),)
 # EMAIL
 # ------------------------------------------------------------------------------
 DEFAULT_FROM_EMAIL = env(
-"DEFAULT_FROM_EMAIL", default="Funkwhale <noreply@{}>".format(FUNKWHALE_HOSTNAME)
+"DEFAULT_FROM_EMAIL", default=f"Funkwhale <noreply@{FUNKWHALE_HOSTNAME}>"
 )
 """
 The name and email address used to send system emails.

View file

@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Local settings
@@ -102,7 +101,7 @@ CSRF_TRUSTED_ORIGINS = [o for o in ALLOWED_HOSTS]
 REST_FRAMEWORK["DEFAULT_SCHEMA_CLASS"] = "funkwhale_api.schema.CustomAutoSchema"
 SPECTACULAR_SETTINGS = {
 "TITLE": "Funkwhale API",
-"DESCRIPTION": open("Readme.md", "r").read(),
+"DESCRIPTION": open("Readme.md").read(),
 "VERSION": funkwhale_version,
 "SCHEMA_PATH_PREFIX": "/api/(v[0-9])?",
 "OAUTH_FLOWS": ["authorizationCode"],

View file

@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Production Configurations
@@ -9,7 +8,6 @@ Production Configurations
 """
-from __future__ import absolute_import, unicode_literals
 from .common import * # noqa

View file

@@ -1,6 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
 from django.conf import settings
 from django.conf.urls import url
 from django.conf.urls.static import static

View file

@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 __version__ = "1.2.8"
 __version_info__ = tuple(
 [

View file

@@ -7,7 +7,7 @@ class ActivityConfig(AppConfig):
 name = "funkwhale_api.activity"
 def ready(self):
-super(ActivityConfig, self).ready()
+super().ready()
 app_names = [app.name for app in apps.app_configs.values()]
 record.registry.autodiscover(app_names)

View file

@@ -17,7 +17,7 @@ def combined_recent(limit, **kwargs):
 _qs_list = list(querysets.values())
 union_qs = _qs_list[0].union(*_qs_list[1:])
 records = []
-for row in union_qs.order_by("-{}".format(datetime_field))[:limit]:
+for row in union_qs.order_by(f"-{datetime_field}")[:limit]:
 records.append(
 {"type": row["__type"], "when": row[datetime_field], "pk": row["pk"]}
 )

View file

@@ -15,7 +15,7 @@ def set_actor(o):
 def get_rss_channel_name():
-return "rssfeed-{}".format(uuid.uuid4())
+return f"rssfeed-{uuid.uuid4()}"
 @registry.register

View file

@@ -93,7 +93,7 @@ class Channel(models.Model):
 suffix = self.actor.preferred_username
 else:
 suffix = self.actor.full_username
-return federation_utils.full_url("/channels/{}".format(suffix))
+return federation_utils.full_url(f"/channels/{suffix}")
 def get_rss_url(self):
 if not self.artist.is_local or self.is_external_rss:

View file

@@ -62,7 +62,7 @@ class ChannelMetadataSerializer(serializers.Serializer):
 if child not in categories.ITUNES_CATEGORIES[parent]:
 raise serializers.ValidationError(
-'"{}" is not a valid subcategory for "{}"'.format(child, parent)
+f'"{child}" is not a valid subcategory for "{parent}"'
 )
 return child
@@ -319,7 +319,7 @@ def retrieve_feed(url):
 except requests.exceptions.HTTPError as e:
 if e.response:
 raise FeedFetchException(
-"Error while fetching feed: HTTP {}".format(e.response.status_code)
+f"Error while fetching feed: HTTP {e.response.status_code}"
 )
 raise FeedFetchException("Error while fetching feed: unknown error")
 except requests.exceptions.Timeout:
@@ -327,9 +327,9 @@ def retrieve_feed(url):
 except requests.exceptions.ConnectionError:
 raise FeedFetchException("Error while fetching feed: connection error")
 except requests.RequestException as e:
-raise FeedFetchException("Error while fetching feed: {}".format(e))
+raise FeedFetchException(f"Error while fetching feed: {e}")
 except Exception as e:
-raise FeedFetchException("Error while fetching feed: {}".format(e))
+raise FeedFetchException(f"Error while fetching feed: {e}")
 return response
@@ -348,7 +348,7 @@ def get_channel_from_rss_url(url, raise_exception=False):
 parsed_feed = feedparser.parse(response.text)
 serializer = RssFeedSerializer(data=parsed_feed["feed"])
 if not serializer.is_valid(raise_exception=raise_exception):
-raise FeedFetchException("Invalid xml content: {}".format(serializer.errors))
+raise FeedFetchException(f"Invalid xml content: {serializer.errors}")
 # second mrf check with validated data
 urls_to_check = set()
@@ -516,7 +516,7 @@ class RssFeedSerializer(serializers.Serializer):
 else:
 artist_kwargs = {"pk": None}
 actor_kwargs = {"pk": None}
-preferred_username = "rssfeed-{}".format(uuid.uuid4())
+preferred_username = f"rssfeed-{uuid.uuid4()}"
 actor_defaults = {
 "preferred_username": preferred_username,
 "type": "Application",
@@ -594,7 +594,7 @@ class ItunesDurationField(serializers.CharField):
 try:
 int_parts.append(int(part))
 except (ValueError, TypeError):
-raise serializers.ValidationError("Invalid duration {}".format(v))
+raise serializers.ValidationError(f"Invalid duration {v}")
 if len(int_parts) == 2:
 hours = 0
@@ -602,7 +602,7 @@ class ItunesDurationField(serializers.CharField):
 elif len(int_parts) == 3:
 hours, minutes, seconds = int_parts
 else:
-raise serializers.ValidationError("Invalid duration {}".format(v))
+raise serializers.ValidationError(f"Invalid duration {v}")
 return (hours * 3600) + (minutes * 60) + seconds
@@ -782,8 +782,8 @@ class RssFeedItemSerializer(serializers.Serializer):
 # update or create, so we restore the cache by hand
 if existing_track:
 for field in ["attachment_cover", "description"]:
-cached_id_value = getattr(existing_track, "{}_id".format(field))
-new_id_value = getattr(track, "{}_id".format(field))
+cached_id_value = getattr(existing_track, f"{field}_id")
+new_id_value = getattr(track, f"{field}_id")
 if new_id_value and cached_id_value == new_id_value:
 setattr(track, field, getattr(existing_track, field))

View file

@@ -61,7 +61,7 @@ def channel_detail(query, redirect_to_ap):
 "rel": "alternate",
 "type": "application/rss+xml",
 "href": obj.get_rss_url(),
-"title": "{} - RSS Podcast Feed".format(obj.artist.name),
+"title": f"{obj.artist.name} - RSS Podcast Feed",
 },
 )
@@ -73,7 +73,7 @@ def channel_detail(query, redirect_to_ap):
 "type": "application/json+oembed",
 "href": (
 utils.join_url(settings.FUNKWHALE_URL, reverse("api:v1:oembed"))
-+ "?format=json&url={}".format(urllib.parse.quote_plus(obj_url))
++ f"?format=json&url={urllib.parse.quote_plus(obj_url)}"
 ),
 }
 )

View file

@@ -31,7 +31,7 @@ ARTIST_PREFETCH_QS = (
 )
-class ChannelsMixin(object):
+class ChannelsMixin:
 def dispatch(self, request, *args, **kwargs):
 if not preferences.get("audio__channels_enabled"):
 return http.HttpResponse(status=405)

View file

@@ -20,7 +20,7 @@ def handler_add_tags_from_tracks(
 if result is None:
 click.echo(" No relevant tags found")
 else:
-click.echo(" Relevant tags added to {} objects".format(len(result)))
+click.echo(f" Relevant tags added to {len(result)} objects")
 @base.cli.group()

View file

@@ -16,7 +16,7 @@ def invoke():
 except ValidationError as e:
 click.secho("Invalid data:", fg="red")
 for field, errors in e.detail.items():
-click.secho(" {}:".format(field), fg="red")
+click.secho(f" {field}:", fg="red")
 for error in errors:
-click.secho(" - {}".format(error), fg="red")
+click.secho(f" - {error}", fg="red")
 sys.exit(1)

View file

@@ -39,19 +39,15 @@ def generate_thumbnails(delete):
 (Attachment, "file", "attachment_square"),
 ]
 for model, attribute, key_set in MODELS:
-click.echo(
-"Generating thumbnails for {}.{}".format(model._meta.label, attribute)
-)
-qs = model.objects.exclude(**{"{}__isnull".format(attribute): True})
+click.echo(f"Generating thumbnails for {model._meta.label}.{attribute}")
+qs = model.objects.exclude(**{f"{attribute}__isnull": True})
 qs = qs.exclude(**{attribute: ""})
 cache_key = "*{}{}*".format(
 settings.MEDIA_URL, vif_settings.VERSATILEIMAGEFIELD_SIZED_DIRNAME
 )
 entries = cache.keys(cache_key)
 if entries:
-click.echo(
-" Clearing {} cache entries: {}".format(len(entries), cache_key)
-)
+click.echo(f" Clearing {len(entries)} cache entries: {cache_key}")
 for keys in common_utils.batch(iter(entries)):
 cache.delete_many(keys)
 warmer = VersatileImageFieldWarmer(
@@ -62,6 +58,4 @@ def generate_thumbnails(delete):
 )
 click.echo(" Creating images")
 num_created, failed_to_create = warmer.warm()
-click.echo(
-" {} created, {} in error".format(num_created, len(failed_to_create))
-)
+click.echo(f" {num_created} created, {len(failed_to_create)} in error")

View file

@@ -7,7 +7,7 @@ from funkwhale_api.users import models, serializers, tasks
 from . import base, utils
-class FakeRequest(object):
+class FakeRequest:
 def __init__(self, session={}):
 self.session = session
@@ -44,7 +44,7 @@ def handler_create_user(
 for permission in permissions:
 if permission in models.PERMISSIONS:
 utils.logger.debug("Setting %s permission to True", permission)
-setattr(user, "permission_{}".format(permission), True)
+setattr(user, f"permission_{permission}", True)
 else:
 utils.logger.warn("Unknown permission %s", permission)
 utils.logger.debug("Creating actor…")
@@ -56,7 +56,7 @@ def handler_create_user(
 @transaction.atomic
 def handler_delete_user(usernames, soft=True):
 for username in usernames:
-click.echo("Deleting {}".format(username))
+click.echo(f"Deleting {username}")
 actor = None
 user = None
 try:
@@ -178,9 +178,9 @@ def create(username, password, email, superuser, staff, permission, upload_quota
 permissions=permission,
 upload_quota=upload_quota,
 )
-click.echo("User {} created!".format(user.username))
+click.echo(f"User {user.username} created!")
 if generated_password:
-click.echo(" Generated password: {}".format(generated_password))
+click.echo(f" Generated password: {generated_password}")
 @base.delete_command(group=users, id_var="username")

View file

@@ -16,7 +16,7 @@ class UnverifiedEmail(Exception):
 def resend_confirmation_email(request, user):
 THROTTLE_DELAY = 500
-cache_key = "auth:resent-email-confirmation:{}".format(user.pk)
+cache_key = f"auth:resent-email-confirmation:{user.pk}"
 if cache.get(cache_key):
 return False
@@ -34,7 +34,7 @@ class OAuth2Authentication(BaseOAuth2Authentication):
 resend_confirmation_email(request, e.user)
-class ApplicationTokenAuthentication(object):
+class ApplicationTokenAuthentication:
 def authenticate(self, request):
 try:
 header = request.headers["Authorization"]

View file

@@ -24,9 +24,9 @@ def privacy_level_query(user, lookup_field="privacy_level", user_field="user"):
 if user.is_anonymous:
 return models.Q(**{lookup_field: "everyone"})
-return models.Q(
-**{"{}__in".format(lookup_field): ["instance", "everyone"]}
-) | models.Q(**{lookup_field: "me", user_field: user})
+return models.Q(**{f"{lookup_field}__in": ["instance", "everyone"]}) | models.Q(
+**{lookup_field: "me", user_field: user}
+)
 class SearchFilter(django_filters.CharFilter):
@@ -97,7 +97,7 @@ def get_generic_filter_query(value, relation_name, choices):
 obj = related_queryset.get(obj_query)
 except related_queryset.model.DoesNotExist:
 raise forms.ValidationError("Invalid object")
-filter_query &= models.Q(**{"{}_id".format(relation_name): obj.id})
+filter_query &= models.Q(**{f"{relation_name}_id": obj.id})
 return filter_query
@@ -163,7 +163,7 @@ class GenericRelation(serializers.JSONField):
 id_value = v[id_attr]
 id_value = id_field.to_internal_value(id_value)
 except (TypeError, KeyError, serializers.ValidationError):
-raise serializers.ValidationError("Invalid {}".format(id_attr))
+raise serializers.ValidationError(f"Invalid {id_attr}")
 query_getter = conf.get(
 "get_query", lambda attr, value: models.Q(**{attr: value})

View file

@@ -7,7 +7,7 @@ from drf_spectacular.utils import extend_schema_field
 from . import fields, models, search, utils
-class NoneObject(object):
+class NoneObject:
 def __eq__(self, other):
 return other.__class__ == NoneObject
@@ -46,7 +46,7 @@ class CoerceChoiceField(forms.ChoiceField):
 try:
 return [b for a, b in self.choices if v == a][0]
 except IndexError:
-raise forms.ValidationError("Invalid value {}".format(value))
+raise forms.ValidationError(f"Invalid value {value}")
 @extend_schema_field(bool)
@@ -63,9 +63,7 @@ class NullBooleanFilter(filters.ChoiceFilter):
 return qs
 if value == NONE:
 value = None
-qs = self.get_method(qs)(
-**{"%s__%s" % (self.field_name, self.lookup_expr): value}
-)
+qs = self.get_method(qs)(**{f"{self.field_name}__{self.lookup_expr}": value})
 return qs.distinct() if self.distinct else qs
@@ -217,7 +215,7 @@ class ActorScopeFilter(filters.CharFilter):
 if not self.library_field:
 predicate = "pk__in"
 else:
-predicate = "{}__in".format(self.library_field)
+predicate = f"{self.library_field}__in"
 return Q(**{predicate: followed_libraries})
 elif scope.startswith("actor:"):
@@ -234,7 +232,7 @@ class ActorScopeFilter(filters.CharFilter):
 return Q(**{self.actor_field: actor})
 elif scope.startswith("domain:"):
 domain = scope.split("domain:", 1)[1]
-return Q(**{"{}__domain_id".format(self.actor_field): domain})
+return Q(**{f"{self.actor_field}__domain_id": domain})
 else:
 raise EmptyQuerySet()

View file

@@ -50,7 +50,7 @@ class Command(BaseCommand):
 self.stdout.write(self.style.SUCCESS(name))
 self.stdout.write("")
 for line in script["help"].splitlines():
-self.stdout.write(" {}".format(line))
+self.stdout.write(f" {line}")
 self.stdout.write("")
 def get_scripts(self):

View file

@@ -78,7 +78,7 @@ def serve_spa(request):
 # We add the style add the end of the body to ensure it has the highest
 # priority (since it will come after other stylesheets)
 body, tail = tail.split("</body>", 1)
-css = "<style>{}</style>".format(css)
+css = f"<style>{css}</style>"
 tail = body + "\n" + css + "\n</body>" + tail
 # set a csrf token so that visitor can login / query API if needed
@@ -93,13 +93,13 @@ TITLE_REGEX = re.compile(r"<title>.*</title>")
 def replace_manifest_url(head, new_url):
-replacement = '<link rel=manifest href="{}">'.format(new_url)
+replacement = f'<link rel=manifest href="{new_url}">'
 head = MANIFEST_LINK_REGEX.sub(replacement, head)
 return head
 def replace_title(head, new_title):
-replacement = "<title>{}</title>".format(html.escape(new_title))
+replacement = f"<title>{html.escape(new_title)}</title>"
 head = TITLE_REGEX.sub(replacement, head)
 return head
@@ -117,7 +117,7 @@ def get_spa_file(spa_url, name):
 # we try to open a local file
 with open(path, "rb") as f:
 return f.read().decode("utf-8")
-cache_key = "spa-file:{}:{}".format(spa_url, name)
+cache_key = f"spa-file:{spa_url}:{name}"
 cached = caches["local"].get(cache_key)
 if cached:
 return cached
@@ -170,11 +170,7 @@ def render_tags(tags):
 yield "<{tag} {attrs} />".format(
 tag=tag.pop("tag"),
 attrs=" ".join(
-[
-'{}="{}"'.format(a, html.escape(str(v)))
-for a, v in sorted(tag.items())
-if v
-]
+[f'{a}="{html.escape(str(v))}"' for a, v in sorted(tag.items()) if v]
 ),
 )

View file

@@ -3,7 +3,7 @@ from django.shortcuts import get_object_or_404
 from rest_framework import serializers
-class MultipleLookupDetailMixin(object):
+class MultipleLookupDetailMixin:
 lookup_value_regex = "[^/]+"
 lookup_field = "composite"

View file

@@ -36,7 +36,7 @@ class NotEqual(Lookup):
 lhs, lhs_params = self.process_lhs(compiler, connection)
 rhs, rhs_params = self.process_rhs(compiler, connection)
 params = lhs_params + rhs_params
-return "%s <> %s" % (lhs, rhs), params
+return f"{lhs} <> {rhs}", params
 class NullsLastSQLCompiler(SQLCompiler):
@@ -77,8 +77,8 @@ class NullsLastQuerySet(models.QuerySet):
 class LocalFromFidQuerySet:
 def local(self, include=True):
 host = settings.FEDERATION_HOSTNAME
-query = models.Q(fid__startswith="http://{}/".format(host)) | models.Q(
-fid__startswith="https://{}/".format(host)
+query = models.Q(fid__startswith=f"http://{host}/") | models.Q(
+fid__startswith=f"https://{host}/"
 )
 if include:
 return self.filter(query)
@@ -362,7 +362,7 @@ CONTENT_FKS = {
 def remove_attached_content(sender, instance, **kwargs):
 fk_fields = CONTENT_FKS.get(instance._meta.label, [])
 for field in fk_fields:
-if getattr(instance, "{}_id".format(field)):
+if getattr(instance, f"{field}_id"):
 try:
 getattr(instance, field).delete()
 except Content.DoesNotExist:

View file

@@ -43,7 +43,7 @@ class Registry(persisting_theory.Registry):
 def has_perm(self, perm, type, obj, actor):
 if perm not in ["approve", "suggest"]:
-raise ValueError("Invalid permission {}".format(perm))
+raise ValueError(f"Invalid permission {perm}")
 conf = self.get_conf(type, obj)
 checker = conf["perm_checkers"].get(perm)
 if not checker:
@@ -54,7 +54,7 @@ class Registry(persisting_theory.Registry):
 try:
 type_conf = self[type]
 except KeyError:
-raise ConfNotFound("{} is not a registered mutation".format(type))
+raise ConfNotFound(f"{type} is not a registered mutation")
 try:
 conf = type_conf[obj.__class__]
@@ -63,7 +63,7 @@ class Registry(persisting_theory.Registry):
 conf = type_conf[None]
 except KeyError:
 raise ConfNotFound(
-"No mutation configuration found for {}".format(obj.__class__)
+f"No mutation configuration found for {obj.__class__}"
 )
 return conf

View file

@@ -7,7 +7,7 @@ from dynamic_preferences import serializers, types
 from dynamic_preferences.registries import global_preferences_registry
-class DefaultFromSettingMixin(object):
+class DefaultFromSettingMixin:
 def get_default(self):
 return getattr(settings, self.setting)
@@ -38,7 +38,7 @@ class StringListSerializer(serializers.BaseSerializer):
 if type(value) not in [list, tuple]:
 raise cls.exception(
-"Cannot serialize, value {} is not a list or a tuple".format(value)
+f"Cannot serialize, value {value} is not a list or a tuple"
 )
 if cls.sort:
@@ -57,7 +57,7 @@ class StringListPreference(types.BasePreferenceType):
 field_class = forms.MultipleChoiceField
 def get_api_additional_data(self):
-d = super(StringListPreference, self).get_api_additional_data()
+d = super().get_api_additional_data()
 d["choices"] = self.get("choices")
 return d
@@ -72,14 +72,14 @@ class JSONSerializer(serializers.BaseSerializer):
 data_serializer = cls.data_serializer_class(data=value)
 if not data_serializer.is_valid():
 raise cls.exception(
-"{} is not a valid value: {}".format(value, data_serializer.errors)
+f"{value} is not a valid value: {data_serializer.errors}"
 )
 value = data_serializer.validated_data
 try:
 return json.dumps(value, sort_keys=True)
 except TypeError:
 raise cls.exception(
-"Cannot serialize, value {} is not JSON serializable".format(value)
+f"Cannot serialize, value {value} is not JSON serializable"
 )
 @classmethod

View file

@@ -9,15 +9,13 @@ from funkwhale_api.users.models import User, create_actor
 def main(command, **kwargs):
 qs = User.objects.filter(actor__isnull=True).order_by("username")
 total = len(qs)
-command.stdout.write("{} users found without actors".format(total))
+command.stdout.write(f"{total} users found without actors")
 for i, user in enumerate(qs):
-command.stdout.write(
-"{}/{} creating actor for {}".format(i + 1, total, user.username)
-)
+command.stdout.write(f"{i + 1}/{total} creating actor for {user.username}")
 try:
 user.actor = create_actor(user)
 except IntegrityError as e:
 # somehow, an actor with the the url exists in the database
-command.stderr.write("Error while creating actor: {}".format(str(e)))
+command.stderr.write(f"Error while creating actor: {str(e)}")
 continue
 user.save(update_fields=["actor"])

View file

@@ -13,7 +13,7 @@ MODELS = [
 def main(command, **kwargs):
 for model, attribute, key_set in MODELS:
-qs = model.objects.exclude(**{"{}__isnull".format(attribute): True})
+qs = model.objects.exclude(**{f"{attribute}__isnull": True})
 qs = qs.exclude(**{attribute: ""})
 warmer = VersatileImageFieldWarmer(
 instance_or_queryset=qs,
@@ -21,10 +21,8 @@ def main(command, **kwargs):
 image_attr=attribute,
 verbose=True,
 )
-command.stdout.write(
-"Creating images for {} / {}".format(model.__name__, attribute)
-)
+command.stdout.write(f"Creating images for {model.__name__} / {attribute}")
 num_created, failed_to_create = warmer.warm()
 command.stdout.write(
-" {} created, {} in error".format(num_created, len(failed_to_create))
+f" {num_created} created, {len(failed_to_create)} in error"
 )

View file

@@ -10,5 +10,5 @@ def main(command, **kwargs):
 source__startswith="http", source__contains="/federation/music/file/"
 ).exclude(source__contains="youtube")
 total = queryset.count()
-command.stdout.write("{} uploads found".format(total))
+command.stdout.write(f"{total} uploads found")
 queryset.delete()

View file

@@ -23,6 +23,6 @@ def main(command, **kwargs):
 total = users.count()
 command.stdout.write(
-"Updating {} users with {} permission...".format(total, user_permission)
+f"Updating {total} users with {user_permission} permission..."
 )
-users.update(**{"permission_{}".format(user_permission): True})
+users.update(**{f"permission_{user_permission}": True})

View file

@@ -36,9 +36,7 @@ def create_libraries(open_api, stdout):
 )
 libraries_by_user[library.actor.user.pk] = library.pk
 if created:
-stdout.write(
-" * Created library {} for user {}".format(library.pk, a.user.pk)
-)
+stdout.write(f" * Created library {library.pk} for user {a.user.pk}")
 else:
 stdout.write(
 " * Found existing library {} for user {}".format(
@@ -60,13 +58,9 @@ def update_uploads(libraries_by_user, stdout):
 )
 total = candidates.update(library=library_id, import_status="finished")
 if total:
-stdout.write(
-" * Assigned {} uploads to user {}'s library".format(total, user_id)
-)
+stdout.write(f" * Assigned {total} uploads to user {user_id}'s library")
 else:
-stdout.write(
-" * No uploads to assign to user {}'s library".format(user_id)
-)
+stdout.write(f" * No uploads to assign to user {user_id}'s library")
 def update_orphan_uploads(open_api, stdout):
@@ -105,14 +99,12 @@ def update_orphan_uploads(open_api, stdout):
 def set_fid(queryset, path, stdout):
 model = queryset.model._meta.label
 qs = queryset.filter(fid=None)
-base_url = "{}{}".format(settings.FUNKWHALE_URL, path)
-stdout.write(
-"* Assigning federation ids to {} entries (path: {})".format(model, base_url)
-)
+base_url = f"{settings.FUNKWHALE_URL}{path}"
+stdout.write(f"* Assigning federation ids to {model} entries (path: {base_url})")
 new_fid = functions.Concat(Value(base_url), F("uuid"), output_field=CharField())
 total = qs.update(fid=new_fid)
-stdout.write(" * {} entries updated".format(total))
+stdout.write(f" * {total} entries updated")
 def update_shared_inbox_url(stdout):
@@ -123,16 +115,16 @@ def update_shared_inbox_url(stdout):
 def generate_actor_urls(part, stdout):
-field = "{}_url".format(part)
-stdout.write("* Update {} for local actors...".format(field))
+field = f"{part}_url"
+stdout.write(f"* Update {field} for local actors...")
 queryset = federation_models.Actor.objects.local().filter(**{field: None})
-base_url = "{}/federation/actors/".format(settings.FUNKWHALE_URL)
+base_url = f"{settings.FUNKWHALE_URL}/federation/actors/"
 new_field = functions.Concat(
 Value(base_url),
 F("preferred_username"),
-Value("/{}".format(part)),
+Value(f"/{part}"),
 output_field=CharField(),
 )

View file

@@ -72,7 +72,7 @@ def get_fts_query(query_string, fts_fields=["body_text"], model=None):
 else:
 query_string = remove_chars(query_string, ['"', "&", "(", ")", "!", "'"])
 parts = query_string.replace(":", "").split(" ")
-parts = ["{}:*".format(p) for p in parts if p]
+parts = [f"{p}:*" for p in parts if p]
 if not parts:
 return Q(pk=None)
@@ -97,7 +97,7 @@ def get_fts_query(query_string, fts_fields=["body_text"], model=None):
 )
 }
 ).values_list("pk", flat=True)
-new_query = Q(**{"{}__in".format(fk_field_name): list(subquery)})
+new_query = Q(**{f"{fk_field_name}__in": list(subquery)})
 else:
 new_query = Q(
 **{
@@ -180,7 +180,7 @@ class SearchConfig:
 except KeyError:
 # no cleaning to apply
 value = token["value"]
-q = Q(**{"{}__icontains".format(to): value})
+q = Q(**{f"{to}__icontains": value})
 if not specific_field_query:
 specific_field_query = q
 else:

View file

@@ -82,14 +82,14 @@ class RelatedField(serializers.RelatedField):
 )
-class Action(object):
+class Action:
 def __init__(self, name, allow_all=False, qs_filter=None):
 self.name = name
 self.allow_all = allow_all
 self.qs_filter = qs_filter
 def __repr__(self):
-return "<Action {}>".format(self.name)
+return f"<Action {self.name}>"
 class ActionSerializer(serializers.Serializer):
@@ -113,7 +113,7 @@ class ActionSerializer(serializers.Serializer):
 )
 for action in self.actions_by_name.keys():
-handler_name = "handle_{}".format(action)
+handler_name = f"handle_{action}"
 assert hasattr(self, handler_name), "{} miss a {} method".format(
 self.__class__.__name__, handler_name
 )
@@ -133,9 +133,9 @@ class ActionSerializer(serializers.Serializer):
 if value == "all":
 return self.queryset.all().order_by("id")
 if type(value) in [list, tuple]:
-return self.queryset.filter(
-**{"{}__in".format(self.pk_field): value}
-).order_by(self.pk_field)
+return self.queryset.filter(**{f"{self.pk_field}__in": value}).order_by(
+self.pk_field
+)
 raise serializers.ValidationError(
 "{} is not a valid value for objects. You must provide either a "
@@ -281,7 +281,7 @@ class APIMutationSerializer(serializers.ModelSerializer):
 def validate_type(self, value):
 if value not in self.context["registry"]:
-raise serializers.ValidationError("Invalid mutation type {}".format(value))
+raise serializers.ValidationError(f"Invalid mutation type {value}")
 return value
@@ -321,7 +321,7 @@ class ContentSerializer(serializers.Serializer):
 return utils.render_html(o.text, o.content_type)
-class NullToEmptDict(object):
+class NullToEmptDict:
 def get_attribute(self, o):
 attr = super().get_attribute(o)
 if attr is None:

View file

@@ -36,7 +36,7 @@ def rename_file(instance, field_name, new_name, allow_missing_file=False):
 field = getattr(instance, field_name)
 current_name, extension = os.path.splitext(field.name)
-new_name_with_extension = "{}{}".format(new_name, extension)
+new_name_with_extension = f"{new_name}{extension}"
 try:
 shutil.move(field.path, new_name_with_extension)
 except FileNotFoundError:
@@ -71,7 +71,7 @@ def set_query_parameter(url, **kwargs):
 @deconstructible
-class ChunkedPath(object):
+class ChunkedPath:
 def sanitize_filename(self, filename):
 return filename.replace("/", "-")
@@ -88,7 +88,7 @@ class ChunkedPath(object):
 parts = chunks[:3] + [filename]
 else:
 ext = os.path.splitext(filename)[1][1:].lower()
-new_filename = "".join(chunks[3:]) + ".{}".format(ext)
+new_filename = "".join(chunks[3:]) + f".{ext}"
 parts = chunks[:3] + [new_filename]
 return os.path.join(self.root, *parts)
@@ -227,7 +227,7 @@ def replace_prefix(queryset, field, old, new):
 on a whole table with a single query.
 """
-qs = queryset.filter(**{"{}__startswith".format(field): old})
+qs = queryset.filter(**{f"{field}__startswith": old})
 # we extract the part after the old prefix, and Concat it with our new prefix
 update = models.functions.Concat(
 models.Value(new),
@@ -353,7 +353,7 @@ def attach_content(obj, field, content_data):
 from . import models
 content_data = content_data or {}
-existing = getattr(obj, "{}_id".format(field))
+existing = getattr(obj, f"{field}_id")
 if existing:
 if same_content(getattr(obj, field), **content_data):
@@ -378,7 +378,7 @@ def attach_content(obj, field, content_data):
 def attach_file(obj, field, file_data, fetch=False):
 from . import models, tasks
-existing = getattr(obj, "{}_id".format(field))
+existing = getattr(obj, f"{field}_id")
 if existing:
 getattr(obj, field).delete()
@@ -395,7 +395,7 @@ def attach_file(obj, field, file_data, fetch=False):
 name = [
 getattr(obj, field) for field in name_fields if getattr(obj, field, None)
 ][0]
-filename = "{}-{}.{}".format(field, name, extension)
+filename = f"{field}-{name}.{extension}"
 if "url" in file_data:
 attachment.url = file_data["url"]
 else:
@@ -487,4 +487,4 @@ def get_file_hash(file, algo=None, chunk_size=None, full_read=False):
 # sometimes, it's useful to only hash the beginning of the file, e.g
 # to avoid a lot of I/O when crawling large libraries
 hash.update(file.read(chunk_size))
-return "{}:{}".format(algo, hash.hexdigest())
+return f"{algo}:{hash.hexdigest()}"

View file

@@ -72,7 +72,7 @@ class ImageDimensionsValidator:
 @deconstructible
-class FileValidator(object):
+class FileValidator:
 """
 Taken from https://gist.github.com/jrosebr1/2140738
 Validator for files, checking the size, extension and mimetype.
@@ -163,5 +163,5 @@ class DomainValidator(validators.URLValidator):
 If it fails, we know the domain is not valid.
 """
-super().__call__("http://{}".format(value))
+super().__call__(f"http://{value}")
 return value

View file

@@ -1 +0,0 @@
-# -*- coding: utf-8 -*-

View file

@@ -68,7 +68,7 @@ class Track:
 }
 def __repr__(self):
-return "Track(%s, %s)" % (self.artist_name, self.track_name)
+return f"Track({self.artist_name}, {self.track_name})"
 class ListenBrainzClient:
@@ -127,7 +127,7 @@ class ListenBrainzClient:
 response_data = response_text
 self._handle_ratelimit(response)
-log_msg = "Response %s: %r" % (response.status, response_data)
+log_msg = f"Response {response.status}: {response_data!r}"
 if response.status == 429 and retry < 5: # Too Many Requests
 self.logger.warning(log_msg)
 return self._submit(listen_type, payload, retry + 1)

View file

@@ -84,16 +84,16 @@ def get_scrobble_payload(track, date, suffix="[0]"):
 """
 upload = track.uploads.filter(duration__gte=0).first()
 data = {
-"a{}".format(suffix): track.artist.name,
-"t{}".format(suffix): track.title,
-"l{}".format(suffix): upload.duration if upload else 0,
-"b{}".format(suffix): (track.album.title if track.album else "") or "",
-"n{}".format(suffix): track.position or "",
-"m{}".format(suffix): str(track.mbid or ""),
-"o{}".format(suffix): "P", # Source: P = chosen by user
+f"a{suffix}": track.artist.name,
+f"t{suffix}": track.title,
+f"l{suffix}": upload.duration if upload else 0,
+f"b{suffix}": (track.album.title if track.album else "") or "",
+f"n{suffix}": track.position or "",
+f"m{suffix}": str(track.mbid or ""),
+f"o{suffix}": "P", # Source: P = chosen by user
 }
 if date:
-data["i{}".format(suffix)] = int(date.timestamp())
+data[f"i{suffix}"] = int(date.timestamp())
 return data

View file

@@ -1 +0,0 @@
-# -*- coding: utf-8 -*-

View file

@@ -316,16 +316,16 @@ class FunkwhaleProvider(internet_provider.Provider):
 def federation_url(self, prefix="", local=False):
 def path_generator():
-return "{}/{}".format(prefix, uuid.uuid4())
+return f"{prefix}/{uuid.uuid4()}"
 domain = settings.FEDERATION_HOSTNAME if local else self.domain_name()
 protocol = "https"
 path = path_generator()
-return "{}://{}/{}".format(protocol, domain, path)
+return f"{protocol}://{domain}/{path}"
 def user_name(self):
 u = super().user_name()
-return "{}{}".format(u, random.randint(10, 999))
+return f"{u}{random.randint(10, 999)}"
 def music_genre(self):
 return random.choice(TAGS_DATA["genre"])

View file

@@ -23,4 +23,4 @@ class TrackFavorite(models.Model):
 return favorite
 def get_activity_url(self):
-return "{}/favorites/tracks/{}".format(self.user.get_activity_url(), self.pk)
+return f"{self.user.get_activity_url()}/favorites/tracks/{self.pk}"

View file

@@ -241,8 +241,8 @@ class InboxRouter(Router):
 for k in r.keys():
 if k in ["object", "target", "related_object"]:
 update_fields += [
-"{}_id".format(k),
-"{}_content_type".format(k),
+f"{k}_id",
+f"{k}_content_type",
 ]
 else:
 update_fields.append(k)
@@ -264,7 +264,7 @@ class InboxRouter(Router):
 user = ii.actor.get_user()
 if not user:
 continue
-group = "user.{}.inbox".format(user.pk)
+group = f"user.{user.pk}.inbox"
 channels.group_send(
 group,
 {

View file

@@ -22,7 +22,7 @@ def get_actor_data(actor_url):
 try:
 return response.json()
 except Exception:
-raise ValueError("Invalid actor payload: {}".format(response.text))
+raise ValueError(f"Invalid actor payload: {response.text}")
 def get_actor(fid, skip_cache=False):

View file

@@ -216,7 +216,7 @@ class FetchSerializer(serializers.ModelSerializer):
 except validators.ValidationError:
 return value
-return "webfinger://{}".format(value)
+return f"webfinger://{value}"
 def create(self, validated_data):
 check_duplicates = not validated_data.get("force", False)

View file

@@ -190,12 +190,12 @@ class LibraryViewSet(mixins.RetrieveModelMixin, viewsets.GenericViewSet):
 )
 except requests.exceptions.RequestException as e:
 return response.Response(
-{"detail": "Error while fetching the library: {}".format(str(e))},
+{"detail": f"Error while fetching the library: {str(e)}"},
 status=400,
 )
 except serializers.serializers.ValidationError as e:
 return response.Response(
-{"detail": "Invalid data in remote library: {}".format(str(e))},
+{"detail": f"Invalid data in remote library: {str(e)}"},
 status=400,
 )
 serializer = self.serializer_class(library)

View file

@@ -362,14 +362,14 @@ class NS:
 def __getattr__(self, key):
 if key not in self.conf["document"]["@context"]:
 raise AttributeError(
-"{} is not a valid property of context {}".format(key, self.baseUrl)
+f"{key} is not a valid property of context {self.baseUrl}"
 )
 return self.baseUrl + key
 class NoopContext:
 def __getattr__(self, key):
-return "_:{}".format(key)
+return f"_:{key}"
 NOOP = NoopContext()

View file

@@ -106,7 +106,7 @@ class ActorFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
 summary = factory.Faker("paragraph")
 domain = factory.SubFactory(DomainFactory)
 fid = factory.LazyAttribute(
-lambda o: "https://{}/users/{}".format(o.domain.name, o.preferred_username)
+lambda o: f"https://{o.domain.name}/users/{o.preferred_username}"
 )
 followers_url = factory.LazyAttribute(
 lambda o: "https://{}/users/{}followers".format(
@@ -142,7 +142,7 @@ class ActorFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
 self.domain = models.Domain.objects.get_or_create(
 name=settings.FEDERATION_HOSTNAME
 )[0]
-self.fid = "https://{}/actors/{}".format(self.domain, self.preferred_username)
+self.fid = f"https://{self.domain}/actors/{self.preferred_username}"
 self.save(update_fields=["domain", "fid"])
 if not create:
 if extracted and hasattr(extracted, "pk"):
@@ -300,13 +300,13 @@ class NoteFactory(factory.Factory):
 @registry.register(name="federation.AudioMetadata")
 class AudioMetadataFactory(factory.Factory):
 recording = factory.LazyAttribute(
-lambda o: "https://musicbrainz.org/recording/{}".format(uuid.uuid4())
+lambda o: f"https://musicbrainz.org/recording/{uuid.uuid4()}"
 )
 artist = factory.LazyAttribute(
-lambda o: "https://musicbrainz.org/artist/{}".format(uuid.uuid4())
+lambda o: f"https://musicbrainz.org/artist/{uuid.uuid4()}"
 )
 release = factory.LazyAttribute(
-lambda o: "https://musicbrainz.org/release/{}".format(uuid.uuid4())
+lambda o: f"https://musicbrainz.org/release/{uuid.uuid4()}"
 )
 bitrate = 42
 length = 43

View file

@@ -257,7 +257,7 @@ class JsonLdSerializer(serializers.Serializer):
 data = expand(data)
 except ValueError as e:
 raise serializers.ValidationError(
-"{} is not a valid jsonld document: {}".format(data, e)
+f"{data} is not a valid jsonld document: {e}"
 )
 try:
 config = self.Meta.jsonld_mapping

View file

@@ -21,7 +21,7 @@ def get_library_data(library_url, actor):
 elif scode == 403:
 return {"errors": ["Permission denied while scanning library"]}
 elif scode >= 400:
-return {"errors": ["Error {} while fetching the library".format(scode)]}
+return {"errors": [f"Error {scode} while fetching the library"]}
 serializer = serializers.LibrarySerializer(data=response.json())
 if not serializer.is_valid():
 return {"errors": ["Invalid ActivityPub response from remote library"]}

View file

@@ -67,9 +67,7 @@ class Command(BaseCommand):
 for kls, fields in MODELS:
 results[kls] = {}
 for field in fields:
-candidates = kls.objects.filter(
-**{"{}__startswith".format(field): old_prefix}
-)
+candidates = kls.objects.filter(**{f"{field}__startswith": old_prefix})
 results[kls][field] = candidates.count()
 total = sum([t for k in results.values() for t in k.values()])
@@ -92,9 +90,7 @@ class Command(BaseCommand):
 )
 else:
-self.stdout.write(
-"No objects found with prefix {}, exiting.".format(old_prefix)
-)
+self.stdout.write(f"No objects found with prefix {old_prefix}, exiting.")
 return
 if options["dry_run"]:
 self.stdout.write(
@@ -112,9 +108,7 @@ class Command(BaseCommand):
 for kls, fields in results.items():
 for field, count in fields.items():
-self.stdout.write(
-"Replacing {} on {} {}".format(field, count, kls._meta.label)
-)
+self.stdout.write(f"Replacing {field} on {count} {kls._meta.label}")
 candidates = kls.objects.all()
 utils.replace_prefix(candidates, field, old=old_prefix, new=new_prefix)
 self.stdout.write("")

View file

@@ -80,7 +80,7 @@ class ActorQuerySet(models.QuerySet):
 )
 qs = qs.annotate(
 **{
-"_usage_{}".format(s): models.Sum(
+f"_usage_{s}": models.Sum(
 "libraries__uploads__size", filter=uploads_query
 )
 }
@@ -226,22 +226,22 @@ class Actor(models.Model):
 verbose_name = "Account"
 def get_moderation_url(self):
-return "/manage/moderation/accounts/{}".format(self.full_username)
+return f"/manage/moderation/accounts/{self.full_username}"
 @property
 def webfinger_subject(self):
-return "{}@{}".format(self.preferred_username, settings.FEDERATION_HOSTNAME)
+return f"{self.preferred_username}@{settings.FEDERATION_HOSTNAME}"
 @property
 def private_key_id(self):
-return "{}#main-key".format(self.fid)
+return f"{self.fid}#main-key"
 @property
 def full_username(self) -> str:
-return "{}@{}".format(self.preferred_username, self.domain_id)
+return f"{self.preferred_username}@{self.domain_id}"
 def __str__(self):
-return "{}@{}".format(self.preferred_username, self.domain_id)
+return f"{self.preferred_username}@{self.domain_id}"
 @property
 def is_local(self) -> bool:
@@ -270,14 +270,14 @@ class Actor(models.Model):
 def get_absolute_url(self):
 if self.is_local:
-return federation_utils.full_url("/@{}".format(self.preferred_username))
+return federation_utils.full_url(f"/@{self.preferred_username}")
 return self.url or self.fid
 def get_current_usage(self):
 actor = self.__class__.objects.filter(pk=self.pk).with_current_usage().get()
 data = {}
 for s in ["draft", "pending", "skipped", "errored", "finished"]:
-data[s] = getattr(actor, "_usage_{}".format(s)) or 0
+data[s] = getattr(actor, f"_usage_{s}") or 0
 data["total"] = sum(data.values())
 return data
@@ -341,8 +341,8 @@ class Actor(models.Model):
 # matches, we consider the actor has the permission to manage
 # the object
 domain = self.domain_id
-return obj.fid.startswith("http://{}/".format(domain)) or obj.fid.startswith(
-"https://{}/".format(domain)
+return obj.fid.startswith(f"http://{domain}/") or obj.fid.startswith(
+f"https://{domain}/"
 )
 @property
@@ -498,9 +498,7 @@ class AbstractFollow(models.Model):
 abstract = True
 def get_federation_id(self):
-return federation_utils.full_url(
-"{}#follows/{}".format(self.actor.fid, self.uuid)
-)
+return federation_utils.full_url(f"{self.actor.fid}#follows/{self.uuid}")
 class Follow(AbstractFollow):
@@ -594,7 +592,7 @@ class LibraryTrack(models.Model):
 remote_response.raise_for_status()
 extension = music_utils.get_ext_from_type(self.audio_mimetype)
 title = " - ".join([self.title, self.album_title, self.artist_name])
-filename = "{}.{}".format(title, extension)
+filename = f"{title}.{extension}"
 tmp_file = tempfile.TemporaryFile()
 for chunk in r.iter_content(chunk_size=512):
 tmp_file.write(chunk)

View file

@ -116,7 +116,7 @@ class MediaSerializer(jsonld.JsonLdSerializer):
if not is_mimetype(v, self.allowed_mimetypes): if not is_mimetype(v, self.allowed_mimetypes):
raise serializers.ValidationError( raise serializers.ValidationError(
"Invalid mimetype {}. Allowed: {}".format(v, self.allowed_mimetypes) f"Invalid mimetype {v}. Allowed: {self.allowed_mimetypes}"
) )
return v return v
@ -371,7 +371,7 @@ class ActorSerializer(jsonld.JsonLdSerializer):
ret["publicKey"] = { ret["publicKey"] = {
"owner": instance.fid, "owner": instance.fid,
"publicKeyPem": instance.public_key, "publicKeyPem": instance.public_key,
"id": "{}#main-key".format(instance.fid), "id": f"{instance.fid}#main-key",
} }
ret["endpoints"] = {} ret["endpoints"] = {}
@ -453,7 +453,7 @@ class ActorSerializer(jsonld.JsonLdSerializer):
actor, actor,
rss_url=rss_url, rss_url=rss_url,
attributed_to_fid=attributed_to, attributed_to_fid=attributed_to,
**self.validated_data **self.validated_data,
) )
return actor return actor
@ -736,9 +736,7 @@ class FollowActionSerializer(serializers.Serializer):
.get() .get()
) )
except follow_class.DoesNotExist: except follow_class.DoesNotExist:
- raise serializers.ValidationError(
- "No follow to {}".format(self.action_type)
- )
+ raise serializers.ValidationError(f"No follow to {self.action_type}")
return validated_data return validated_data
def to_representation(self, instance): def to_representation(self, instance):
@ -749,7 +747,7 @@ class FollowActionSerializer(serializers.Serializer):
return { return {
"@context": jsonld.get_default_context(), "@context": jsonld.get_default_context(),
"id": instance.get_federation_id() + "/{}".format(self.action_type), "id": instance.get_federation_id() + f"/{self.action_type}",
"type": self.action_type.title(), "type": self.action_type.title(),
"actor": actor.fid, "actor": actor.fid,
"object": FollowSerializer(instance).data, "object": FollowSerializer(instance).data,
@ -855,7 +853,7 @@ class ActorWebfingerSerializer(serializers.Serializer):
def to_representation(self, instance): def to_representation(self, instance):
data = {} data = {}
data["subject"] = "acct:{}".format(instance.webfinger_subject) data["subject"] = f"acct:{instance.webfinger_subject}"
data["links"] = [ data["links"] = [
{"rel": "self", "href": instance.fid, "type": "application/activity+json"} {"rel": "self", "href": instance.fid, "type": "application/activity+json"}
] ]
@ -881,7 +879,7 @@ class ActivitySerializer(serializers.Serializer):
try: try:
object_serializer = OBJECT_SERIALIZERS[type] object_serializer = OBJECT_SERIALIZERS[type]
except KeyError: except KeyError:
raise serializers.ValidationError("Unsupported type {}".format(type)) raise serializers.ValidationError(f"Unsupported type {type}")
serializer = object_serializer(data=value) serializer = object_serializer(data=value)
serializer.is_valid(raise_exception=True) serializer.is_valid(raise_exception=True)
@ -1165,7 +1163,7 @@ MUSIC_ENTITY_JSONLD_MAPPING = {
def repr_tag(tag_name): def repr_tag(tag_name):
return {"type": "Hashtag", "name": "#{}".format(tag_name)} return {"type": "Hashtag", "name": f"#{tag_name}"}
def include_content(repr, content_obj): def include_content(repr, content_obj):
@ -1704,9 +1702,7 @@ class FlagSerializer(jsonld.JsonLdSerializer):
try: try:
return utils.get_object_by_fid(v, local=True) return utils.get_object_by_fid(v, local=True)
except ObjectDoesNotExist: except ObjectDoesNotExist:
- raise serializers.ValidationError(
- "Unknown id {} for reported object".format(v)
- )
+ raise serializers.ValidationError(f"Unknown id {v} for reported object")
def validate_type(self, tags): def validate_type(self, tags):
if tags: if tags:
@ -1918,7 +1914,7 @@ class ChannelUploadSerializer(jsonld.JsonLdSerializer):
tags = [item.tag.name for item in upload.get_all_tagged_items()] tags = [item.tag.name for item in upload.get_all_tagged_items()]
if tags: if tags:
data["tag"] = [repr_tag(name) for name in sorted(set(tags))] data["tag"] = [repr_tag(name) for name in sorted(set(tags))]
data["summary"] = " ".join(["#{}".format(name) for name in tags]) data["summary"] = " ".join([f"#{name}" for name in tags])
if self.context.get("include_ap_context", True): if self.context.get("include_ap_context", True):
data["@context"] = jsonld.get_default_context() data["@context"] = jsonld.get_default_context()
@ -2039,7 +2035,7 @@ class DeleteSerializer(jsonld.JsonLdSerializer):
try: try:
obj = utils.get_object_by_fid(url) obj = utils.get_object_by_fid(url)
except utils.ObjectDoesNotExist: except utils.ObjectDoesNotExist:
raise serializers.ValidationError("No object matching {}".format(url)) raise serializers.ValidationError(f"No object matching {url}")
if isinstance(obj, music_models.Upload): if isinstance(obj, music_models.Upload):
obj = obj.track obj = obj.track
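One change above is purely syntactic: the call that unpacks `**self.validated_data` now ends with a trailing comma. Python 2 rejected a comma after a `**` argument, but the grammar of the Python versions this commit targets (3.7+) accepts it, which lets formatters expand such calls one argument per line. A small sketch with made-up names:

    def create_actor(**kwargs):  # hypothetical stand-in for the serializer call
        return kwargs

    payload = {"rss_url": None, "attributed_to_fid": None}
    actor = create_actor(
        extra="value",
        **payload,  # trailing comma after ** unpacking is valid on Python 3.7+
    )
    assert actor["extra"] == "value"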


@ -31,7 +31,7 @@ def verify_date(raw_date):
now = timezone.now() now = timezone.now()
if dt < now - delta or dt > now + delta: if dt < now - delta or dt > now + delta:
raise forms.ValidationError( raise forms.ValidationError(
"Request Date {} is too far in the future or in the past".format(raw_date) f"Request Date {raw_date} is too far in the future or in the past"
) )
return dt return dt
@ -70,10 +70,10 @@ def verify_django(django_request, public_key):
signature = headers["Signature"] signature = headers["Signature"]
except KeyError: except KeyError:
raise exceptions.MissingSignature raise exceptions.MissingSignature
url = "http://noop{}".format(django_request.path) url = f"http://noop{django_request.path}"
query = django_request.META["QUERY_STRING"] query = django_request.META["QUERY_STRING"]
if query: if query:
url += "?{}".format(query) url += f"?{query}"
signature_headers = signature.split('headers="')[1].split('",')[0] signature_headers = signature.split('headers="')[1].split('",')[0]
expected = signature_headers.split(" ") expected = signature_headers.split(" ")
logger.debug("Signature expected headers: %s", expected) logger.debug("Signature expected headers: %s", expected)


@ -170,7 +170,7 @@ def deliver_to_remote(delivery):
def fetch_nodeinfo(domain_name): def fetch_nodeinfo(domain_name):
s = session.get_session() s = session.get_session()
wellknown_url = "https://{}/.well-known/nodeinfo".format(domain_name) wellknown_url = f"https://{domain_name}/.well-known/nodeinfo"
response = s.get(url=wellknown_url) response = s.get(url=wellknown_url)
response.raise_for_status() response.raise_for_status()
serializer = serializers.NodeInfoSerializer(data=response.json()) serializer = serializers.NodeInfoSerializer(data=response.json())


@ -122,10 +122,8 @@ def get_domain_query_from_url(domain, url_field="fid"):
to match objects that have this domain in the given field. to match objects that have this domain in the given field.
""" """
- query = Q(**{"{}__startswith".format(url_field): "http://{}/".format(domain)})
- query = query | Q(
- **{"{}__startswith".format(url_field): "https://{}/".format(domain)}
- )
+ query = Q(**{f"{url_field}__startswith": f"http://{domain}/"})
+ query = query | Q(**{f"{url_field}__startswith": f"https://{domain}/"})
return query return query
@ -143,9 +141,7 @@ def is_local(url) -> bool:
return True return True
d = settings.FEDERATION_HOSTNAME d = settings.FEDERATION_HOSTNAME
- return url.startswith("http://{}/".format(d)) or url.startswith(
- "https://{}/".format(d)
- )
+ return url.startswith(f"http://{d}/") or url.startswith(f"https://{d}/")
def get_actor_data_from_username(username): def get_actor_data_from_username(username):
@ -164,8 +160,8 @@ def get_actor_from_username_data_query(field, data):
if field: if field:
return Q( return Q(
**{ **{
"{}__preferred_username__iexact".format(field): data["username"], f"{field}__preferred_username__iexact": data["username"],
"{}__domain__name__iexact".format(field): data["domain"], f"{field}__domain__name__iexact": data["domain"],
} }
) )
else: else:


@ -68,7 +68,7 @@ class AuthenticatedIfAllowListEnabled(permissions.BasePermission):
return bool(request.actor) return bool(request.actor)
class FederationMixin(object): class FederationMixin:
permission_classes = [AuthenticatedIfAllowListEnabled] permission_classes = [AuthenticatedIfAllowListEnabled]
def dispatch(self, request, *args, **kwargs): def dispatch(self, request, *args, **kwargs):
@ -223,9 +223,9 @@ class WellKnownViewSet(viewsets.GenericViewSet):
return HttpResponse(status=405) return HttpResponse(status=405)
try: try:
resource_type, resource = webfinger.clean_resource(request.GET["resource"]) resource_type, resource = webfinger.clean_resource(request.GET["resource"])
cleaner = getattr(webfinger, "clean_{}".format(resource_type)) cleaner = getattr(webfinger, f"clean_{resource_type}")
result = cleaner(resource) result = cleaner(resource)
handler = getattr(self, "handler_{}".format(resource_type)) handler = getattr(self, f"handler_{resource_type}")
data = handler(result) data = handler(result)
except forms.ValidationError as e: except forms.ValidationError as e:
return response.Response({"errors": {"resource": e.message}}, status=400) return response.Response({"errors": {"resource": e.message}}, status=400)
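`class FederationMixin(object)` becoming `class FederationMixin:` in the hunk above is another Python-3-only simplification: every class is new-style on Python 3, so the explicit `object` base adds nothing. A quick check with throwaway classes:

    class WithExplicitBase(object):  # Python 2 spelling, still legal
        pass

    class WithImplicitBase:          # spelling used throughout this commit
        pass

    # both inherit from object and behave identically on Python 3
    assert WithExplicitBase.__mro__[-1] is object
    assert WithImplicitBase.__mro__[-1] is object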


@ -30,7 +30,7 @@ def clean_acct(acct_string, ensure_local=True):
raise forms.ValidationError("Invalid format") raise forms.ValidationError("Invalid format")
if ensure_local and hostname.lower() != settings.FEDERATION_HOSTNAME: if ensure_local and hostname.lower() != settings.FEDERATION_HOSTNAME:
raise forms.ValidationError("Invalid hostname {}".format(hostname)) raise forms.ValidationError(f"Invalid hostname {hostname}")
return username, hostname return username, hostname


@ -22,4 +22,4 @@ class Listening(models.Model):
ordering = ("-creation_date",) ordering = ("-creation_date",)
def get_activity_url(self): def get_activity_url(self):
return "{}/listenings/tracks/{}".format(self.user.get_activity_url(), self.pk) return f"{self.user.get_activity_url()}/listenings/tracks/{self.pk}"


@ -85,10 +85,8 @@ class ManageUserSerializer(serializers.ModelSerializer):
permissions = validated_data.pop("permissions", {}) permissions = validated_data.pop("permissions", {})
if permissions: if permissions:
for p, value in permissions.items(): for p, value in permissions.items():
setattr(instance, "permission_{}".format(p), value) setattr(instance, f"permission_{p}", value)
- instance.save(
- update_fields=["permission_{}".format(p) for p in permissions.keys()]
- )
+ instance.save(update_fields=[f"permission_{p}" for p in permissions.keys()])
return instance return instance
@extend_schema_field(OpenApiTypes.OBJECT) @extend_schema_field(OpenApiTypes.OBJECT)


@ -19,7 +19,7 @@ def get_filtered_content_query(config, user):
query = None query = None
ids = user.content_filters.values_list(filter_field, flat=True) ids = user.content_filters.values_list(filter_field, flat=True)
for model_field in model_fields: for model_field in model_fields:
q = Q(**{"{}__in".format(model_field): ids}) q = Q(**{f"{model_field}__in": ids})
if query: if query:
query |= q query |= q
else: else:


@ -71,7 +71,7 @@ class Command(BaseCommand):
) )
) )
for name in registry.keys(): for name in registry.keys():
self.stdout.write("- {}".format(name)) self.stdout.write(f"- {name}")
return return
raw_content = None raw_content = None
content = None content = None


@ -29,13 +29,11 @@ def check_allow_list(payload, **kwargs):
utils.recursive_getattr(payload, "object.id", permissive=True), utils.recursive_getattr(payload, "object.id", permissive=True),
] ]
- relevant_domains = set(
- [
- domain
- for domain in [urllib.parse.urlparse(i).hostname for i in relevant_ids if i]
- if domain
- ]
- )
+ relevant_domains = {
+ domain
+ for domain in [urllib.parse.urlparse(i).hostname for i in relevant_ids if i]
+ if domain
+ }
if relevant_domains - allowed_domains: if relevant_domains - allowed_domains:
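The hunk above replaces `set([...])` wrapped around a list comprehension with a set comprehension, which builds the same set without the throwaway list literal. A self-contained sketch with invented URLs:

    import urllib.parse

    relevant_ids = ["https://a.example/actor", "https://b.example/note/1", None]

    old_style = set(
        [
            domain
            for domain in [urllib.parse.urlparse(i).hostname for i in relevant_ids if i]
            if domain
        ]
    )
    new_style = {
        domain
        for domain in [urllib.parse.urlparse(i).hostname for i in relevant_ids if i]
        if domain
    }
    assert old_style == new_style == {"a.example", "b.example"}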


@ -61,7 +61,7 @@ class UserFilterSerializer(serializers.ModelSerializer):
state_serializers = persisting_theory.Registry() state_serializers = persisting_theory.Registry()
class DescriptionStateMixin(object): class DescriptionStateMixin:
def get_description(self, o): def get_description(self, o):
if o.description: if o.description:
return o.description.text return o.description.text


@ -64,9 +64,7 @@ def send_new_report_email_to_moderators(report):
subject = "[{} moderation - {}] New report from {}".format( subject = "[{} moderation - {}] New report from {}".format(
settings.FUNKWHALE_HOSTNAME, report.get_type_display(), submitter_repr settings.FUNKWHALE_HOSTNAME, report.get_type_display(), submitter_repr
) )
- detail_url = federation_utils.full_url(
- "/manage/moderation/reports/{}".format(report.uuid)
- )
+ detail_url = federation_utils.full_url(f"/manage/moderation/reports/{report.uuid}")
unresolved_reports_url = federation_utils.full_url( unresolved_reports_url = federation_utils.full_url(
"/manage/moderation/reports?q=resolved:no" "/manage/moderation/reports?q=resolved:no"
) )
@ -97,7 +95,7 @@ def send_new_report_email_to_moderators(report):
body += [ body += [
"", "",
"- To handle this report, please visit {}".format(detail_url), f"- To handle this report, please visit {detail_url}",
"- To view all unresolved reports (currently {}), please visit {}".format( "- To view all unresolved reports (currently {}), please visit {}".format(
unresolved_reports, unresolved_reports_url unresolved_reports, unresolved_reports_url
), ),
@ -173,9 +171,7 @@ def notify_mods_signup_request_pending(obj):
subject = "[{} moderation] New sign-up request from {}".format( subject = "[{} moderation] New sign-up request from {}".format(
settings.FUNKWHALE_HOSTNAME, submitter_repr settings.FUNKWHALE_HOSTNAME, submitter_repr
) )
- detail_url = federation_utils.full_url(
- "/manage/moderation/requests/{}".format(obj.uuid)
- )
+ detail_url = federation_utils.full_url(f"/manage/moderation/requests/{obj.uuid}")
unresolved_requests_url = federation_utils.full_url( unresolved_requests_url = federation_utils.full_url(
"/manage/moderation/requests?q=status:pending" "/manage/moderation/requests?q=status:pending"
) )
@ -185,7 +181,7 @@ def notify_mods_signup_request_pending(obj):
submitter_repr submitter_repr
), ),
"", "",
"- To handle this request, please visit {}".format(detail_url), f"- To handle this request, please visit {detail_url}",
"- To view all unresolved requests (currently {}), please visit {}".format( "- To view all unresolved requests (currently {}), please visit {}".format(
unresolved_requests, unresolved_requests_url unresolved_requests, unresolved_requests_url
), ),
@ -217,10 +213,10 @@ def notify_submitter_signup_request_approved(user_request):
if not submitter_email: if not submitter_email:
logger.warning("User %s has no e-mail address configured", submitter_repr) logger.warning("User %s has no e-mail address configured", submitter_repr)
return return
subject = "Welcome to {}, {}!".format(settings.FUNKWHALE_HOSTNAME, submitter_repr) subject = f"Welcome to {settings.FUNKWHALE_HOSTNAME}, {submitter_repr}!"
login_url = federation_utils.full_url("/login") login_url = federation_utils.full_url("/login")
body = [ body = [
"Hi {} and welcome,".format(submitter_repr), f"Hi {submitter_repr} and welcome,",
"", "",
"Our moderation team has approved your account request and you can now start " "Our moderation team has approved your account request and you can now start "
"using the service. Please visit {} to get started.".format(login_url), "using the service. Please visit {} to get started.".format(login_url),
@ -246,7 +242,7 @@ def notify_submitter_signup_request_refused(user_request):
settings.FUNKWHALE_HOSTNAME settings.FUNKWHALE_HOSTNAME
) )
body = [ body = [
"Hi {},".format(submitter_repr), f"Hi {submitter_repr},",
"", "",
"You recently submitted an account request on our service. However, our " "You recently submitted an account request on our service. However, our "
"moderation team has refused it, and as a result, you won't be able to use " "moderation team has refused it, and as a result, you won't be able to use "


@ -6,7 +6,7 @@ def load(model, *args, **kwargs):
EXCLUDE_VALIDATION = {"Track": ["artist"]} EXCLUDE_VALIDATION = {"Track": ["artist"]}
class Importer(object): class Importer:
def __init__(self, model): def __init__(self, model):
self.model = model self.model = model
@ -22,7 +22,7 @@ class Importer(object):
return m return m
class Mapping(object): class Mapping:
"""Cast musicbrainz data to funkwhale data and vice-versa""" """Cast musicbrainz data to funkwhale data and vice-versa"""
def __init__(self, musicbrainz_mapping): def __init__(self, musicbrainz_mapping):


@ -70,7 +70,7 @@ def match(*values):
value, value,
) )
if not urls: if not urls:
logger.debug('Impossible to guess license from string "{}"'.format(value)) logger.debug(f'Impossible to guess license from string "{value}"')
continue continue
url = urls[0] url = urls[0]
if _cache: if _cache:
@ -122,7 +122,7 @@ def get_cc_license(version, perks, country=None, country_name=None):
) )
if country: if country:
code_parts.append(country) code_parts.append(country)
name += " {}".format(country_name) name += f" {country_name}"
url += country + "/" url += country + "/"
data = { data = {
"name": name, "name": name,


@ -13,7 +13,7 @@ def progress(buffer, count, total, status=""):
bar = "=" * filled_len + "-" * (bar_len - filled_len) bar = "=" * filled_len + "-" * (bar_len - filled_len)
buffer.write("[%s] %s/%s ...%s\r" % (bar, count, total, status)) buffer.write(f"[{bar}] {count}/{total} ...{status}\r")
buffer.flush() buffer.flush()
@ -43,7 +43,7 @@ class Command(BaseCommand):
candidates = models.Upload.objects.filter(source__startswith="file://") candidates = models.Upload.objects.filter(source__startswith="file://")
candidates = candidates.filter(audio_file__in=["", None]) candidates = candidates.filter(audio_file__in=["", None])
total = candidates.count() total = candidates.count()
self.stdout.write("Checking {} in-place imported files…".format(total)) self.stdout.write(f"Checking {total} in-place imported files…")
missing = [] missing = []
for i, row in enumerate(candidates.values("id", "source").iterator()): for i, row in enumerate(candidates.values("id", "source").iterator()):
@ -54,7 +54,7 @@ class Command(BaseCommand):
if missing: if missing:
for path, _ in missing: for path, _ in missing:
self.stdout.write(" {}".format(path)) self.stdout.write(f" {path}")
self.stdout.write( self.stdout.write(
"The previous {} paths are referenced in database, but not found on disk!".format( "The previous {} paths are referenced in database, but not found on disk!".format(
len(missing) len(missing)
@ -71,5 +71,5 @@ class Command(BaseCommand):
"Nothing was deleted, rerun this command with --no-dry-run to apply the changes" "Nothing was deleted, rerun this command with --no-dry-run to apply the changes"
) )
else: else:
self.stdout.write("Deleting {} uploads…".format(to_delete.count())) self.stdout.write(f"Deleting {to_delete.count()} uploads…")
to_delete.delete() to_delete.delete()


@ -21,7 +21,7 @@ class Command(BaseCommand):
errored.append((data, response)) errored.append((data, response))
if errored: if errored:
self.stdout.write("{} licenses were not reachable!".format(len(errored))) self.stdout.write(f"{len(errored)} licenses were not reachable!")
for row, response in errored: for row, response in errored:
self.stdout.write( self.stdout.write(
"- {}: error {} at url {}".format( "- {}: error {} at url {}".format(


@ -73,13 +73,11 @@ class Command(BaseCommand):
Q(source__startswith="file://") | Q(source__startswith="upload://") Q(source__startswith="file://") | Q(source__startswith="upload://")
).exclude(mimetype__startswith="audio/") ).exclude(mimetype__startswith="audio/")
total = matching.count() total = matching.count()
- self.stdout.write(
- "[mimetypes] {} entries found with bad or no mimetype".format(total)
- )
+ self.stdout.write(f"[mimetypes] {total} entries found with bad or no mimetype")
if not total: if not total:
return return
for extension, mimetype in utils.EXTENSION_TO_MIMETYPE.items(): for extension, mimetype in utils.EXTENSION_TO_MIMETYPE.items():
qs = matching.filter(source__endswith=".{}".format(extension)) qs = matching.filter(source__endswith=f".{extension}")
self.stdout.write( self.stdout.write(
"[mimetypes] setting {} {} files to {}".format( "[mimetypes] setting {} {} files to {}".format(
qs.count(), extension, mimetype qs.count(), extension, mimetype
@ -95,9 +93,7 @@ class Command(BaseCommand):
Q(bitrate__isnull=True) | Q(duration__isnull=True) Q(bitrate__isnull=True) | Q(duration__isnull=True)
) )
total = matching.count() total = matching.count()
- self.stdout.write(
- "[bitrate/length] {} entries found with missing values".format(total)
- )
+ self.stdout.write(f"[bitrate/length] {total} entries found with missing values")
if dry_run: if dry_run:
return return
@ -135,7 +131,7 @@ class Command(BaseCommand):
self.stdout.write("Fixing missing size...") self.stdout.write("Fixing missing size...")
matching = models.Upload.objects.filter(size__isnull=True) matching = models.Upload.objects.filter(size__isnull=True)
total = matching.count() total = matching.count()
self.stdout.write("[size] {} entries found with missing values".format(total)) self.stdout.write(f"[size] {total} entries found with missing values")
if dry_run: if dry_run:
return return
@ -148,16 +144,12 @@ class Command(BaseCommand):
for upload in chunk: for upload in chunk:
handled += 1 handled += 1
- self.stdout.write(
- "[size] {}/{} fixing file #{}".format(handled, total, upload.pk)
- )
+ self.stdout.write(f"[size] {handled}/{total} fixing file #{upload.pk}")
try: try:
upload.size = upload.get_file_size() upload.size = upload.get_file_size()
except Exception as e: except Exception as e:
- self.stderr.write(
- "[size] error with file #{}: {}".format(upload.pk, str(e))
- )
+ self.stderr.write(f"[size] error with file #{upload.pk}: {str(e)}")
else: else:
updated.append(upload) updated.append(upload)
@ -170,9 +162,7 @@ class Command(BaseCommand):
& (Q(audio_file__isnull=False) | Q(source__startswith="file://")) & (Q(audio_file__isnull=False) | Q(source__startswith="file://"))
) )
total = matching.count() total = matching.count()
- self.stdout.write(
- "[checksum] {} entries found with missing values".format(total)
- )
+ self.stdout.write(f"[checksum] {total} entries found with missing values")
if dry_run: if dry_run:
return return
chunks = common_utils.chunk_queryset( chunks = common_utils.chunk_queryset(
@ -184,7 +174,7 @@ class Command(BaseCommand):
for upload in chunk: for upload in chunk:
handled += 1 handled += 1
self.stdout.write( self.stdout.write(
"[checksum] {}/{} fixing file #{}".format(handled, total, upload.pk) f"[checksum] {handled}/{total} fixing file #{upload.pk}"
) )
try: try:
@ -193,7 +183,7 @@ class Command(BaseCommand):
) )
except Exception as e: except Exception as e:
self.stderr.write( self.stderr.write(
"[checksum] error with file #{}: {}".format(upload.pk, str(e)) f"[checksum] error with file #{upload.pk}: {str(e)}"
) )
else: else:
updated.append(upload) updated.append(upload)


@ -31,7 +31,7 @@ def crawl_dir(dir, extensions, recursive=True, ignored=[]):
try: try:
scanner = os.scandir(dir) scanner = os.scandir(dir)
except Exception as e: except Exception as e:
m = "Error while reading {}: {} {}\n".format(dir, e.__class__.__name__, e) m = f"Error while reading {dir}: {e.__class__.__name__} {e}\n"
sys.stderr.write(m) sys.stderr.write(m)
return return
try: try:
@ -39,7 +39,7 @@ def crawl_dir(dir, extensions, recursive=True, ignored=[]):
try: try:
if entry.is_file(): if entry.is_file():
for e in extensions: for e in extensions:
if entry.name.lower().endswith(".{}".format(e.lower())): if entry.name.lower().endswith(f".{e.lower()}"):
if entry.path not in ignored: if entry.path not in ignored:
yield entry.path yield entry.path
elif recursive and entry.is_dir(): elif recursive and entry.is_dir():
@ -260,7 +260,7 @@ class Command(BaseCommand):
raise CommandError("Invalid library id") raise CommandError("Invalid library id")
if not library.actor.get_user(): if not library.actor.get_user():
raise CommandError("Library {} is not a local library".format(library.uuid)) raise CommandError(f"Library {library.uuid} is not a local library")
if options["in_place"]: if options["in_place"]:
self.stdout.write( self.stdout.write(
@ -282,7 +282,7 @@ class Command(BaseCommand):
"Culprit: {}".format(p, import_path) "Culprit: {}".format(p, import_path)
) )
reference = options["reference"] or "cli-{}".format(timezone.now().isoformat()) reference = options["reference"] or f"cli-{timezone.now().isoformat()}"
import_url = "{}://{}/library/{}/upload?{}" import_url = "{}://{}/library/{}/upload?{}"
import_url = import_url.format( import_url = import_url.format(
@ -393,10 +393,10 @@ class Command(BaseCommand):
message.format(total - len(errors), int(time.time() - start_time)) message.format(total - len(errors), int(time.time() - start_time))
) )
if len(errors) > 0: if len(errors) > 0:
self.stderr.write("{} tracks could not be imported:".format(len(errors))) self.stderr.write(f"{len(errors)} tracks could not be imported:")
for path, error in errors: for path, error in errors:
self.stderr.write("- {}: {}".format(path, error)) self.stderr.write(f"- {path}: {error}")
self.stdout.write( self.stdout.write(
"For details, please refer to import reference '{}' or URL {}".format( "For details, please refer to import reference '{}' or URL {}".format(
@ -485,12 +485,12 @@ class Command(BaseCommand):
return errors return errors
def filter_matching(self, matching, library): def filter_matching(self, matching, library):
sources = ["file://{}".format(p) for p in matching] sources = [f"file://{p}" for p in matching]
# we skip reimport for path that are already found # we skip reimport for path that are already found
# as a Upload.source # as a Upload.source
existing = library.uploads.filter(source__in=sources, import_status="finished") existing = library.uploads.filter(source__in=sources, import_status="finished")
existing = existing.values_list("source", flat=True) existing = existing.values_list("source", flat=True)
existing = set([p.replace("file://", "", 1) for p in existing]) existing = {p.replace("file://", "", 1) for p in existing}
skipped = set(matching) & existing skipped = set(matching) & existing
result = { result = {
"initial": matching, "initial": matching,
@ -530,7 +530,7 @@ class Command(BaseCommand):
path, e.__class__.__name__, e path, e.__class__.__name__, e
) )
self.stderr.write(m) self.stderr.write(m)
errors.append((path, "{} {}".format(e.__class__.__name__, e))) errors.append((path, f"{e.__class__.__name__} {e}"))
return errors return errors
def setup_watcher(self, path, extensions, recursive, **kwargs): def setup_watcher(self, path, extensions, recursive, **kwargs):
@ -544,7 +544,7 @@ class Command(BaseCommand):
worker.start() worker.start()
# setup watchdog to monitor directory for trigger files # setup watchdog to monitor directory for trigger files
patterns = ["*.{}".format(e) for e in extensions] patterns = [f"*.{e}" for e in extensions]
event_handler = Watcher( event_handler = Watcher(
stdout=self.stdout, stdout=self.stdout,
queue=watchdog_queue, queue=watchdog_queue,
@ -556,9 +556,7 @@ class Command(BaseCommand):
try: try:
while True: while True:
- self.stdout.write(
- "Watching for changes at {}".format(path), ending="\r"
- )
+ self.stdout.write(f"Watching for changes at {path}", ending="\r")
time.sleep(10) time.sleep(10)
if kwargs["prune"] and GLOBAL["need_pruning"]: if kwargs["prune"] and GLOBAL["need_pruning"]:
self.stdout.write("Some files were deleted, pruning library…") self.stdout.write("Some files were deleted, pruning library…")
@ -728,7 +726,7 @@ def handle_modified(event, stdout, library, in_place, **kwargs):
try: try:
tasks.update_track_metadata(audio_metadata, to_update.track) tasks.update_track_metadata(audio_metadata, to_update.track)
except serializers.ValidationError as e: except serializers.ValidationError as e:
stdout.write(" Invalid metadata: {}".format(e)) stdout.write(f" Invalid metadata: {e}")
else: else:
to_update.checksum = checksum to_update.checksum = checksum
to_update.save(update_fields=["checksum"]) to_update.save(update_fields=["checksum"])
@ -765,7 +763,7 @@ def handle_moved(event, stdout, library, in_place, **kwargs):
existing_candidates = existing_candidates.in_place().filter(source=old_source) existing_candidates = existing_candidates.in_place().filter(source=old_source)
existing = existing_candidates.first() existing = existing_candidates.first()
if existing: if existing:
stdout.write(" Updating path of existing file #{}".format(existing.pk)) stdout.write(f" Updating path of existing file #{existing.pk}")
existing.source = new_source existing.source = new_source
existing.save(update_fields=["source"]) existing.save(update_fields=["source"])
@ -794,15 +792,14 @@ def check_updates(stdout, library, extensions, paths, batch_size):
for path in paths: for path in paths:
for ext in extensions: for ext in extensions:
queries.append( queries.append(
- Q(source__startswith="file://{}".format(path))
- & Q(source__endswith=".{}".format(ext))
+ Q(source__startswith=f"file://{path}") & Q(source__endswith=f".{ext}")
) )
query, remainder = queries[0], queries[1:] query, remainder = queries[0], queries[1:]
for q in remainder: for q in remainder:
query = q | query query = q | query
existing = existing.filter(query) existing = existing.filter(query)
total = existing.count() total = existing.count()
stdout.write("Found {} files to check in database!".format(total)) stdout.write(f"Found {total} files to check in database!")
uploads = existing.order_by("source") uploads = existing.order_by("source")
for i, rows in enumerate(batch(uploads.iterator(), batch_size)): for i, rows in enumerate(batch(uploads.iterator(), batch_size)):
stdout.write( stdout.write(
@ -849,7 +846,7 @@ def check_upload(stdout, upload):
try: try:
tasks.update_track_metadata(upload.get_metadata(), track) tasks.update_track_metadata(upload.get_metadata(), track)
except serializers.ValidationError as e: except serializers.ValidationError as e:
stdout.write(" Invalid metadata: {}".format(e)) stdout.write(f" Invalid metadata: {e}")
return return
except IntegrityError: except IntegrityError:
stdout.write( stdout.write(


@ -101,11 +101,9 @@ class Command(BaseCommand):
pruned_total = prunable.count() pruned_total = prunable.count()
total = models.Track.objects.count() total = models.Track.objects.count()
if options["dry_run"]: if options["dry_run"]:
- self.stdout.write(
- "Would prune {}/{} tracks".format(pruned_total, total)
- )
+ self.stdout.write(f"Would prune {pruned_total}/{total} tracks")
else: else:
self.stdout.write("Deleting {}/{} tracks…".format(pruned_total, total)) self.stdout.write(f"Deleting {pruned_total}/{total} tracks…")
prunable.delete() prunable.delete()
if options["prune_albums"]: if options["prune_albums"]:
@ -113,11 +111,9 @@ class Command(BaseCommand):
pruned_total = prunable.count() pruned_total = prunable.count()
total = models.Album.objects.count() total = models.Album.objects.count()
if options["dry_run"]: if options["dry_run"]:
- self.stdout.write(
- "Would prune {}/{} albums".format(pruned_total, total)
- )
+ self.stdout.write(f"Would prune {pruned_total}/{total} albums")
else: else:
self.stdout.write("Deleting {}/{} albums…".format(pruned_total, total)) self.stdout.write(f"Deleting {pruned_total}/{total} albums…")
prunable.delete() prunable.delete()
if options["prune_artists"]: if options["prune_artists"]:
@ -125,11 +121,9 @@ class Command(BaseCommand):
pruned_total = prunable.count() pruned_total = prunable.count()
total = models.Artist.objects.count() total = models.Artist.objects.count()
if options["dry_run"]: if options["dry_run"]:
- self.stdout.write(
- "Would prune {}/{} artists".format(pruned_total, total)
- )
+ self.stdout.write(f"Would prune {pruned_total}/{total} artists")
else: else:
self.stdout.write("Deleting {}/{} artists…".format(pruned_total, total)) self.stdout.write(f"Deleting {pruned_total}/{total} artists…")
prunable.delete() prunable.delete()
self.stdout.write("") self.stdout.write("")


@ -355,15 +355,15 @@ class Metadata(Mapping):
def __init__(self, filething, kind=mutagen.File): def __init__(self, filething, kind=mutagen.File):
self._file = kind(filething) self._file = kind(filething)
if self._file is None: if self._file is None:
raise ValueError("Cannot parse metadata from {}".format(filething)) raise ValueError(f"Cannot parse metadata from {filething}")
if len(self._file) == 0: if len(self._file) == 0:
raise ValueError("No tags found in {}".format(filething)) raise ValueError(f"No tags found in {filething}")
self.fallback = self.load_fallback(filething, self._file) self.fallback = self.load_fallback(filething, self._file)
ft = self.get_file_type(self._file) ft = self.get_file_type(self._file)
try: try:
self._conf = CONF[ft] self._conf = CONF[ft]
except KeyError: except KeyError:
raise ValueError("Unsupported format {}".format(ft)) raise ValueError(f"Unsupported format {ft}")
def get_file_type(self, f): def get_file_type(self, f):
return f.__class__.__name__ return f.__class__.__name__
@ -420,7 +420,7 @@ class Metadata(Mapping):
try: try:
field_conf = self._conf["fields"][key] field_conf = self._conf["fields"][key]
except KeyError: except KeyError:
raise UnsupportedTag("{} is not supported for this file format".format(key)) raise UnsupportedTag(f"{key} is not supported for this file format")
real_key = field_conf.get("field", key) real_key = field_conf.get("field", key)
try: try:
getter = field_conf.get("getter", self._conf["getter"]) getter = field_conf.get("getter", self._conf["getter"])
@ -467,8 +467,7 @@ class Metadata(Mapping):
return 1 return 1
def __iter__(self): def __iter__(self):
- for field in self._conf["fields"]:
- yield field
+ yield from self._conf["fields"]
class ArtistField(serializers.Field): class ArtistField(serializers.Field):
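`Metadata.__iter__` above now uses `yield from`, which delegates to the underlying iterable and yields exactly what the explicit loop yielded. A tiny sketch with a stand-in for `self._conf["fields"]`:

    fields = {"title": {}, "artist": {}, "album": {}}  # stand-in mapping

    def iter_with_loop():
        for field in fields:
            yield field

    def iter_with_yield_from():
        yield from fields

    assert list(iter_with_loop()) == list(iter_with_yield_from())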


@ -85,9 +85,7 @@ class APIModelMixin(models.Model):
cls.musicbrainz_model cls.musicbrainz_model
] ]
else: else:
- raw_data = cls.api.search(**kwargs)[
- "{0}-list".format(cls.musicbrainz_model)
- ][0]
+ raw_data = cls.api.search(**kwargs)[f"{cls.musicbrainz_model}-list"][0]
cleaned_data = cls.clean_musicbrainz_data(raw_data) cleaned_data = cls.clean_musicbrainz_data(raw_data)
return importers.load(cls, cleaned_data, raw_data, cls.import_hooks) return importers.load(cls, cleaned_data, raw_data, cls.import_hooks)
@ -116,7 +114,7 @@ class APIModelMixin(models.Model):
return federation_utils.full_url( return federation_utils.full_url(
reverse( reverse(
"federation:music:{}-detail".format(self.federation_namespace), f"federation:music:{self.federation_namespace}-detail",
kwargs={"uuid": self.uuid}, kwargs={"uuid": self.uuid},
) )
) )
@ -252,10 +250,10 @@ class Artist(APIModelMixin):
return self.name return self.name
def get_absolute_url(self): def get_absolute_url(self):
return "/library/artists/{}".format(self.pk) return f"/library/artists/{self.pk}"
def get_moderation_url(self): def get_moderation_url(self):
return "/manage/library/artists/{}".format(self.pk) return f"/manage/library/artists/{self.pk}"
@classmethod @classmethod
def get_or_create_from_name(cls, name, **kwargs): def get_or_create_from_name(cls, name, **kwargs):
@ -396,10 +394,10 @@ class Album(APIModelMixin):
return self.title return self.title
def get_absolute_url(self): def get_absolute_url(self):
return "/library/albums/{}".format(self.pk) return f"/library/albums/{self.pk}"
def get_moderation_url(self): def get_moderation_url(self):
return "/manage/library/albums/{}".format(self.pk) return f"/manage/library/albums/{self.pk}"
@classmethod @classmethod
def get_or_create_from_title(cls, title, **kwargs): def get_or_create_from_title(cls, title, **kwargs):
@ -557,10 +555,10 @@ class Track(APIModelMixin):
return self.title return self.title
def get_absolute_url(self): def get_absolute_url(self):
return "/library/tracks/{}".format(self.pk) return f"/library/tracks/{self.pk}"
def get_moderation_url(self): def get_moderation_url(self):
return "/manage/library/tracks/{}".format(self.pk) return f"/manage/library/tracks/{self.pk}"
def save(self, **kwargs): def save(self, **kwargs):
try: try:
@ -572,9 +570,9 @@ class Track(APIModelMixin):
@property @property
def full_name(self): def full_name(self):
try: try:
return "{} - {} - {}".format(self.artist.name, self.album.title, self.title) return f"{self.artist.name} - {self.album.title} - {self.title}"
except AttributeError: except AttributeError:
return "{} - {}".format(self.artist.name, self.title) return f"{self.artist.name} - {self.title}"
@property @property
def cover(self): def cover(self):
@ -582,8 +580,8 @@ class Track(APIModelMixin):
def get_activity_url(self): def get_activity_url(self):
if self.mbid: if self.mbid:
return "https://musicbrainz.org/recording/{}".format(self.mbid) return f"https://musicbrainz.org/recording/{self.mbid}"
return settings.FUNKWHALE_URL + "/tracks/{}".format(self.pk) return settings.FUNKWHALE_URL + f"/tracks/{self.pk}"
@classmethod @classmethod
def get_or_create_from_title(cls, title, **kwargs): def get_or_create_from_title(cls, title, **kwargs):
@ -643,7 +641,7 @@ class Track(APIModelMixin):
@property @property
def listen_url(self) -> str: def listen_url(self) -> str:
# Not using reverse because this is slow # Not using reverse because this is slow
return "/api/v1/listen/{}/".format(self.uuid) return f"/api/v1/listen/{self.uuid}/"
@property @property
def local_license(self): def local_license(self):
@ -807,7 +805,7 @@ class Upload(models.Model):
title_parts.append(self.track.artist.name) title_parts.append(self.track.artist.name)
title = " - ".join(title_parts) title = " - ".join(title_parts)
filename = "{}.{}".format(title, extension) filename = f"{title}.{extension}"
tmp_file = tempfile.TemporaryFile() tmp_file = tempfile.TemporaryFile()
for chunk in r.iter_content(chunk_size=512): for chunk in r.iter_content(chunk_size=512):
tmp_file.write(chunk) tmp_file.write(chunk)
@ -824,7 +822,7 @@ class Upload(models.Model):
@property @property
def filename(self) -> str: def filename(self) -> str:
return "{}.{}".format(self.track.full_name, self.extension) return f"{self.track.full_name}.{self.extension}"
@property @property
def extension(self): def extension(self):
@ -900,12 +898,12 @@ class Upload(models.Model):
@property @property
def listen_url(self) -> str: def listen_url(self) -> str:
return self.track.listen_url + "?upload={}".format(self.uuid) return self.track.listen_url + f"?upload={self.uuid}"
def get_listen_url(self, to=None, download=True) -> str: def get_listen_url(self, to=None, download=True) -> str:
url = self.listen_url url = self.listen_url
if to: if to:
url += "&to={}".format(to) url += f"&to={to}"
if not download: if not download:
url += "&download=false" url += "&download=false"
@ -946,9 +944,9 @@ class Upload(models.Model):
bitrate = min(bitrate or 320000, self.bitrate or 320000) bitrate = min(bitrate or 320000, self.bitrate or 320000)
version = self.versions.create(mimetype=mimetype, bitrate=bitrate, size=0) version = self.versions.create(mimetype=mimetype, bitrate=bitrate, size=0)
# we keep the same name, but we update the extension # we keep the same name, but we update the extension
- new_name = os.path.splitext(os.path.basename(self.audio_file.name))[
- 0
- ] + ".{}".format(format)
+ new_name = (
+ os.path.splitext(os.path.basename(self.audio_file.name))[0] + f".{format}"
+ )
version.audio_file.save(new_name, f) version.audio_file.save(new_name, f)
utils.transcode_audio( utils.transcode_audio(
audio=self.get_audio_segment(), audio=self.get_audio_segment(),
@ -1091,9 +1089,7 @@ class ImportBatch(models.Model):
tasks.import_batch_notify_followers.delay(import_batch_id=self.pk) tasks.import_batch_notify_followers.delay(import_batch_id=self.pk)
def get_federation_id(self): def get_federation_id(self):
- return federation_utils.full_url(
- "/federation/music/import/batch/{}".format(self.uuid)
- )
+ return federation_utils.full_url(f"/federation/music/import/batch/{self.uuid}")
class ImportJob(models.Model): class ImportJob(models.Model):
@ -1204,7 +1200,7 @@ class Library(federation_models.FederationMixin):
return self.name return self.name
def get_moderation_url(self) -> str: def get_moderation_url(self) -> str:
return "/manage/library/libraries/{}".format(self.uuid) return f"/manage/library/libraries/{self.uuid}"
def get_federation_id(self) -> str: def get_federation_id(self) -> str:
return federation_utils.full_url( return federation_utils.full_url(
@ -1212,7 +1208,7 @@ class Library(federation_models.FederationMixin):
) )
def get_absolute_url(self) -> str: def get_absolute_url(self) -> str:
return "/library/{}".format(self.uuid) return f"/library/{self.uuid}"
def save(self, **kwargs): def save(self, **kwargs):
if not self.pk and not self.fid and self.actor.is_local: if not self.pk and not self.fid and self.actor.is_local:

Wyświetl plik

@ -40,7 +40,7 @@ class CoverField(common_serializers.AttachmentSerializer):
cover_field = CoverField() cover_field = CoverField()
class OptionalDescriptionMixin(object): class OptionalDescriptionMixin:
def to_representation(self, obj): def to_representation(self, obj):
repr = super().to_representation(obj) repr = super().to_representation(obj)
if self.context.get("description", False): if self.context.get("description", False):
@ -579,7 +579,7 @@ class TrackActivitySerializer(activity_serializers.ModelSerializer):
def get_embed_url(type, id): def get_embed_url(type, id):
return settings.FUNKWHALE_EMBED_URL + "?type={}&id={}".format(type, id) return settings.FUNKWHALE_EMBED_URL + f"?type={type}&id={id}"
class OembedSerializer(serializers.Serializer): class OembedSerializer(serializers.Serializer):
@ -619,7 +619,7 @@ class OembedSerializer(serializers.Serializer):
) )
embed_type = "track" embed_type = "track"
embed_id = track.pk embed_id = track.pk
data["title"] = "{} by {}".format(track.title, track.artist.name) data["title"] = f"{track.title} by {track.artist.name}"
if track.attachment_cover: if track.attachment_cover:
data[ data[
"thumbnail_url" "thumbnail_url"
@ -658,8 +658,8 @@ class OembedSerializer(serializers.Serializer):
] = album.attachment_cover.download_url_medium_square_crop ] = album.attachment_cover.download_url_medium_square_crop
data["thumbnail_width"] = 200 data["thumbnail_width"] = 200
data["thumbnail_height"] = 200 data["thumbnail_height"] = 200
data["title"] = "{} by {}".format(album.title, album.artist.name) data["title"] = f"{album.title} by {album.artist.name}"
data["description"] = "{} by {}".format(album.title, album.artist.name) data["description"] = f"{album.title} by {album.artist.name}"
data["author_name"] = album.artist.name data["author_name"] = album.artist.name
data["height"] = 400 data["height"] = 400
data["author_url"] = federation_utils.full_url( data["author_url"] = federation_utils.full_url(


@ -109,7 +109,7 @@ def library_track(request, pk, redirect_to_ap):
"type": "application/json+oembed", "type": "application/json+oembed",
"href": ( "href": (
utils.join_url(settings.FUNKWHALE_URL, reverse("api:v1:oembed")) utils.join_url(settings.FUNKWHALE_URL, reverse("api:v1:oembed"))
+ "?format=json&url={}".format(urllib.parse.quote_plus(track_url)) + f"?format=json&url={urllib.parse.quote_plus(track_url)}"
), ),
} }
) )
@ -181,7 +181,7 @@ def library_album(request, pk, redirect_to_ap):
"type": "application/json+oembed", "type": "application/json+oembed",
"href": ( "href": (
utils.join_url(settings.FUNKWHALE_URL, reverse("api:v1:oembed")) utils.join_url(settings.FUNKWHALE_URL, reverse("api:v1:oembed"))
+ "?format=json&url={}".format(urllib.parse.quote_plus(album_url)) + f"?format=json&url={urllib.parse.quote_plus(album_url)}"
), ),
} }
) )
@ -245,7 +245,7 @@ def library_artist(request, pk, redirect_to_ap):
"type": "application/json+oembed", "type": "application/json+oembed",
"href": ( "href": (
utils.join_url(settings.FUNKWHALE_URL, reverse("api:v1:oembed")) utils.join_url(settings.FUNKWHALE_URL, reverse("api:v1:oembed"))
+ "?format=json&url={}".format(urllib.parse.quote_plus(artist_url)) + f"?format=json&url={urllib.parse.quote_plus(artist_url)}"
), ),
} }
) )
@ -297,7 +297,7 @@ def library_playlist(request, pk, redirect_to_ap):
"type": "application/json+oembed", "type": "application/json+oembed",
"href": ( "href": (
utils.join_url(settings.FUNKWHALE_URL, reverse("api:v1:oembed")) utils.join_url(settings.FUNKWHALE_URL, reverse("api:v1:oembed"))
+ "?format=json&url={}".format(urllib.parse.quote_plus(obj_url)) + f"?format=json&url={urllib.parse.quote_plus(obj_url)}"
), ),
} }
) )


@ -66,7 +66,7 @@ def get_cover_from_fs(dir_path):
if os.path.exists(dir_path): if os.path.exists(dir_path):
for name in FOLDER_IMAGE_NAMES: for name in FOLDER_IMAGE_NAMES:
for e, m in IMAGE_TYPES: for e, m in IMAGE_TYPES:
cover_path = os.path.join(dir_path, "{}.{}".format(name, e)) cover_path = os.path.join(dir_path, f"{name}.{e}")
if not os.path.exists(cover_path): if not os.path.exists(cover_path):
logger.debug("Cover %s does not exists", cover_path) logger.debug("Cover %s does not exists", cover_path)
continue continue
@ -764,7 +764,7 @@ def broadcast_import_status_update_to_owner(old_status, new_status, upload, **kw
from . import serializers from . import serializers
group = "user.{}.imports".format(user.pk) group = f"user.{user.pk}.imports"
channels.group_send( channels.group_send(
group, group,
{ {
@ -788,7 +788,7 @@ def clean_transcoding_cache():
limit = timezone.now() - datetime.timedelta(minutes=delay) limit = timezone.now() - datetime.timedelta(minutes=delay)
candidates = ( candidates = (
models.UploadVersion.objects.filter( models.UploadVersion.objects.filter(
(Q(accessed_date__lt=limit) | Q(accessed_date=None)) Q(accessed_date__lt=limit) | Q(accessed_date=None)
) )
.only("audio_file", "id") .only("audio_file", "id")
.order_by("id") .order_by("id")


@ -67,9 +67,7 @@ AUDIO_EXTENSIONS_AND_MIMETYPE = [
EXTENSION_TO_MIMETYPE = {ext: mt for ext, mt in AUDIO_EXTENSIONS_AND_MIMETYPE} EXTENSION_TO_MIMETYPE = {ext: mt for ext, mt in AUDIO_EXTENSIONS_AND_MIMETYPE}
MIMETYPE_TO_EXTENSION = {mt: ext for ext, mt in AUDIO_EXTENSIONS_AND_MIMETYPE} MIMETYPE_TO_EXTENSION = {mt: ext for ext, mt in AUDIO_EXTENSIONS_AND_MIMETYPE}
- SUPPORTED_EXTENSIONS = list(
- sorted(set([ext for ext, _ in AUDIO_EXTENSIONS_AND_MIMETYPE]))
- )
+ SUPPORTED_EXTENSIONS = list(sorted({ext for ext, _ in AUDIO_EXTENSIONS_AND_MIMETYPE}))
def get_ext_from_type(mimetype): def get_ext_from_type(mimetype):


@ -101,7 +101,7 @@ def refetch_obj(obj, queryset):
return obj return obj
class HandleInvalidSearch(object): class HandleInvalidSearch:
def list(self, *args, **kwargs): def list(self, *args, **kwargs):
try: try:
return super().list(*args, **kwargs) return super().list(*args, **kwargs)
@ -532,8 +532,8 @@ def should_transcode(upload, format, max_bitrate=None):
def get_content_disposition(filename): def get_content_disposition(filename):
filename = "filename*=UTF-8''{}".format(urllib.parse.quote(filename)) filename = f"filename*=UTF-8''{urllib.parse.quote(filename)}"
return "attachment; {}".format(filename) return f"attachment; {filename}"
def record_downloads(f): def record_downloads(f):


@ -16,10 +16,10 @@ def clean_artist_search(query, **kwargs):
return _api.search_artists(query, **cleaned_kwargs) return _api.search_artists(query, **cleaned_kwargs)
class API(object): class API:
_api = _api _api = _api
class artists(object): class artists:
search = cache_memoize( search = cache_memoize(
settings.MUSICBRAINZ_CACHE_DURATION, settings.MUSICBRAINZ_CACHE_DURATION,
prefix="memoize:musicbrainz:clean_artist_search", prefix="memoize:musicbrainz:clean_artist_search",
@ -29,13 +29,13 @@ class API(object):
prefix="memoize:musicbrainz:get_artist_by_id", prefix="memoize:musicbrainz:get_artist_by_id",
)(_api.get_artist_by_id) )(_api.get_artist_by_id)
class images(object): class images:
get_front = cache_memoize( get_front = cache_memoize(
settings.MUSICBRAINZ_CACHE_DURATION, settings.MUSICBRAINZ_CACHE_DURATION,
prefix="memoize:musicbrainz:get_image_front", prefix="memoize:musicbrainz:get_image_front",
)(_api.get_image_front) )(_api.get_image_front)
class recordings(object): class recordings:
search = cache_memoize( search = cache_memoize(
settings.MUSICBRAINZ_CACHE_DURATION, settings.MUSICBRAINZ_CACHE_DURATION,
prefix="memoize:musicbrainz:search_recordings", prefix="memoize:musicbrainz:search_recordings",
@ -45,7 +45,7 @@ class API(object):
prefix="memoize:musicbrainz:get_recording_by_id", prefix="memoize:musicbrainz:get_recording_by_id",
)(_api.get_recording_by_id) )(_api.get_recording_by_id)
class releases(object): class releases:
search = cache_memoize( search = cache_memoize(
settings.MUSICBRAINZ_CACHE_DURATION, settings.MUSICBRAINZ_CACHE_DURATION,
prefix="memoize:musicbrainz:search_releases", prefix="memoize:musicbrainz:search_releases",
@ -60,7 +60,7 @@ class API(object):
)(_api.browse_releases) )(_api.browse_releases)
# get_image_front = _api.get_image_front # get_image_front = _api.get_image_front
class release_groups(object): class release_groups:
search = cache_memoize( search = cache_memoize(
settings.MUSICBRAINZ_CACHE_DURATION, settings.MUSICBRAINZ_CACHE_DURATION,
prefix="memoize:musicbrainz:search_release_groups", prefix="memoize:musicbrainz:search_release_groups",


@ -82,7 +82,7 @@ class Playlist(models.Model):
return self.name return self.name
def get_absolute_url(self): def get_absolute_url(self):
return "/library/playlists/{}".format(self.pk) return f"/library/playlists/{self.pk}"
@transaction.atomic @transaction.atomic
def insert(self, plt, index=None, allow_duplicates=True): def insert(self, plt, index=None, allow_duplicates=True):
@ -151,7 +151,7 @@ class Playlist(models.Model):
max_tracks = preferences.get("playlists__max_tracks") max_tracks = preferences.get("playlists__max_tracks")
if existing.count() + len(tracks) > max_tracks: if existing.count() + len(tracks) > max_tracks:
raise exceptions.ValidationError( raise exceptions.ValidationError(
"Playlist would reach the maximum of {} tracks".format(max_tracks) f"Playlist would reach the maximum of {max_tracks} tracks"
) )
if not allow_duplicates: if not allow_duplicates:


@ -66,7 +66,7 @@ def clean_config(filter_config):
return f.clean_config(filter_config) return f.clean_config(filter_config)
class RadioFilter(object): class RadioFilter:
help_text = None help_text = None
label = None label = None
fields = [] fields = []
@ -114,7 +114,7 @@ class GroupFilter(RadioFilter):
elif operator == "or": elif operator == "or":
final_query |= query final_query |= query
else: else:
raise ValueError('Invalid query operator "{}"'.format(operator)) raise ValueError(f'Invalid query operator "{operator}"')
return final_query return final_query
def validate(self, config): def validate(self, config):
@ -171,7 +171,7 @@ class ArtistFilter(RadioFilter):
except KeyError: except KeyError:
raise ValidationError("You must provide an id") raise ValidationError("You must provide an id")
except AssertionError: except AssertionError:
raise ValidationError('No artist matching ids "{}"'.format(diff)) raise ValidationError(f'No artist matching ids "{diff}"')
@registry.register @registry.register
@ -226,7 +226,7 @@ class TagFilter(RadioFilter):
except KeyError: except KeyError:
raise ValidationError("You must provide a name") raise ValidationError("You must provide a name")
except AssertionError: except AssertionError:
raise ValidationError('No tag matching names "{}"'.format(diff)) raise ValidationError(f'No tag matching names "{diff}"')
@registry.register @registry.register


@ -20,7 +20,7 @@ from .registries import registry
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class SimpleRadio(object): class SimpleRadio:
related_object_field = None related_object_field = None
def clean(self, instance): def clean(self, instance):


@ -20,7 +20,7 @@ ET._original_serialize_xml = ET._serialize_xml
def _serialize_xml(write, elem, qnames, namespaces, **kwargs): def _serialize_xml(write, elem, qnames, namespaces, **kwargs):
if elem.tag == "![CDATA[": if elem.tag == "![CDATA[":
write("<%s%s]]>" % (elem.tag, elem.text)) write(f"<{elem.tag}{elem.text}]]>")
return return
return ET._original_serialize_xml(write, elem, qnames, namespaces, **kwargs) return ET._original_serialize_xml(write, elem, qnames, namespaces, **kwargs)


@ -39,7 +39,7 @@ def get_track_path(track, suffix):
parts.append(get_valid_filepart(track.album.title)) parts.append(get_valid_filepart(track.album.title))
track_part = get_valid_filepart(track.title) + "." + suffix track_part = get_valid_filepart(track.title) + "." + suffix
if track.position: if track.position:
track_part = "{} - {}".format(track.position, track_part) track_part = f"{track.position} - {track_part}"
parts.append(track_part) parts.append(track_part)
return "/".join(parts) return "/".join(parts)
@ -84,7 +84,7 @@ class GetArtistSerializer(serializers.Serializer):
"album": [], "album": [],
} }
if artist.attachment_cover_id: if artist.attachment_cover_id:
payload["coverArt"] = "ar-{}".format(artist.id) payload["coverArt"] = f"ar-{artist.id}"
for album in albums: for album in albums:
album_data = { album_data = {
"id": album.id, "id": album.id,
@ -95,7 +95,7 @@ class GetArtistSerializer(serializers.Serializer):
"songCount": len(album.tracks.all()), "songCount": len(album.tracks.all()),
} }
if album.attachment_cover_id: if album.attachment_cover_id:
album_data["coverArt"] = "al-{}".format(album.id) album_data["coverArt"] = f"al-{album.id}"
if album.release_date: if album.release_date:
album_data["year"] = album.release_date.year album_data["year"] = album.release_date.year
payload["album"].append(album_data) payload["album"].append(album_data)
@ -128,7 +128,7 @@ def get_track_data(album, track, upload):
"type": "music", "type": "music",
} }
if album and album.attachment_cover_id: if album and album.attachment_cover_id:
data["coverArt"] = "al-{}".format(album.id) data["coverArt"] = f"al-{album.id}"
if upload.bitrate: if upload.bitrate:
data["bitrate"] = int(upload.bitrate / 1000) data["bitrate"] = int(upload.bitrate / 1000)
if upload.size: if upload.size:
@ -151,7 +151,7 @@ def get_album2_data(album):
"playCount": album.tracks.aggregate(l=Count("listenings"))["l"] or 0, "playCount": album.tracks.aggregate(l=Count("listenings"))["l"] or 0,
} }
if album.attachment_cover_id: if album.attachment_cover_id:
payload["coverArt"] = "al-{}".format(album.id) payload["coverArt"] = f"al-{album.id}"
if album.tagged_items: if album.tagged_items:
# exposes only first genre since the specification uses singular noun # exposes only first genre since the specification uses singular noun
first_genre = album.tagged_items.first() first_genre = album.tagged_items.first()
@ -308,7 +308,7 @@ def get_channel_data(channel, uploads):
"description": channel.artist.description.as_plain_text "description": channel.artist.description.as_plain_text
if channel.artist.description if channel.artist.description
else "", else "",
"coverArt": "at-{}".format(channel.artist.attachment_cover.uuid) "coverArt": f"at-{channel.artist.attachment_cover.uuid}"
if channel.artist.attachment_cover if channel.artist.attachment_cover
else "", else "",
"originalImageUrl": channel.artist.attachment_cover.url "originalImageUrl": channel.artist.attachment_cover.url
@ -333,7 +333,7 @@ def get_channel_episode_data(upload, channel_id):
"description": upload.track.description.as_plain_text "description": upload.track.description.as_plain_text
if upload.track.description if upload.track.description
else "", else "",
"coverArt": "at-{}".format(upload.track.attachment_cover.uuid) "coverArt": f"at-{upload.track.attachment_cover.uuid}"
if upload.track.attachment_cover if upload.track.attachment_cover
else "", else "",
"isDir": "false", "isDir": "false",


@ -67,7 +67,7 @@ def find_object(
{ {
"error": { "error": {
"code": 0, "code": 0,
"message": 'For input string "{}"'.format(raw_value), "message": f'For input string "{raw_value}"',
} }
} }
) )
@ -86,7 +86,7 @@ def find_object(
{ {
"error": { "error": {
"code": 70, "code": 70,
"message": "{} not found".format(qs.model.__name__), "message": f"{qs.model.__name__} not found",
} }
} }
) )
@ -904,7 +904,7 @@ class SubsonicViewSet(viewsets.GenericViewSet):
{ {
"error": { "error": {
"code": 0, "code": 0,
"message": "Error while fetching url: {}".format(e), "message": f"Error while fetching url: {e}",
} }
} }
) )


@ -14,7 +14,7 @@ class TagNameField(serializers.CharField):
def to_internal_value(self, value): def to_internal_value(self, value):
value = super().to_internal_value(value) value = super().to_internal_value(value)
if not models.TAG_REGEX.match(value): if not models.TAG_REGEX.match(value):
raise serializers.ValidationError('Invalid tag "{}"'.format(value)) raise serializers.ValidationError(f'Invalid tag "{value}"')
return value return value


@@ -14,16 +14,14 @@ def get_tags_from_foreign_key(
"""
data = {}
objs = foreign_key_model.objects.filter(
-**{"{}__pk__in".format(foreign_key_attr): ids}
+**{f"{foreign_key_attr}__pk__in": ids}
).order_by("-id")
-objs = objs.only("id", "{}_id".format(foreign_key_attr)).prefetch_related(
-tagged_items_attr
-)
+objs = objs.only("id", f"{foreign_key_attr}_id").prefetch_related(tagged_items_attr)
for obj in objs.iterator():
# loop on all objects, store the objs tags + counter on the corresponding foreign key
row_data = data.setdefault(
-getattr(obj, "{}_id".format(foreign_key_attr)),
+getattr(obj, f"{foreign_key_attr}_id"),
{"total_objs": 0, "tags": []},
)
row_data["total_objs"] += 1

Show file

@@ -1,5 +1,3 @@
-from __future__ import absolute_import
import functools
import logging
import os
@@ -22,7 +20,7 @@ app = celery.Celery("funkwhale_api")
@celery.signals.task_failure.connect
def process_failure(sender, task_id, exception, args, kwargs, traceback, einfo, **kw):
-print("[celery] Error during task {}: {}".format(task_id, einfo.exception))
+print(f"[celery] Error during task {task_id}: {einfo.exception}")
tb.print_exc()

Show file

@@ -1 +0,0 @@
-# -*- coding: utf-8 -*-

Show file

@@ -1,6 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import, unicode_literals
from django import forms
from django.contrib.auth.admin import UserAdmin as AuthUserAdmin
from django.contrib.auth.forms import UserChangeForm, UserCreationForm

Show file

@@ -9,7 +9,7 @@ from .oauth import scopes as available_scopes
def generate_scoped_token(user_id, user_secret, scopes):
if set(scopes) & set(available_scopes.SCOPES_BY_ID) != set(scopes):
-raise ValueError("{} contains invalid scopes".format(scopes))
+raise ValueError(f"{scopes} contains invalid scopes")
return signing.dumps(
{

Show file

@@ -10,7 +10,7 @@ from . import models
@registry.register
class GroupFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
-name = factory.Sequence(lambda n: "group-{0}".format(n))
+name = factory.Sequence(lambda n: f"group-{n}")
class Meta:
model = "auth.Group"

Show file

@@ -1,6 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import, unicode_literals
import datetime
import os
import random
@@ -33,7 +30,7 @@ def get_token(length=5):
wordlist_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "wordlist.txt"
)
-with open(wordlist_path, "r") as f:
+with open(wordlist_path) as f:
words = f.readlines()
phrase = "".join(random.choice(words) for i in range(length))
return phrase.replace("\n", "-").rstrip("-")
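
Dropping the explicit "r" argument is safe because open() already defaults to read-only text mode, so open(path) and open(path, "r") behave identically. A quick sketch, assuming nothing more than a throwaway temporary file:

    import tempfile

    with tempfile.NamedTemporaryFile("w", suffix=".txt", delete=False) as tmp:
        tmp.write("alpha\nbeta\n")

    # open(path) is read-only text mode, exactly like open(path, "r")
    with open(tmp.name) as f:
        assert f.readlines() == ["alpha\n", "beta\n"]
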
@@ -201,11 +198,7 @@ class User(AbstractUser):
defaults = defaults or preferences.get("users__default_permissions")
perms = {}
for p in PERMISSIONS:
-v = (
-self.is_superuser
-or getattr(self, "permission_{}".format(p))
-or p in defaults
-)
+v = self.is_superuser or getattr(self, f"permission_{p}") or p in defaults
perms[p] = v
return perms
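
The collapsed one-liner keeps the original short-circuit order: superuser first, then the dynamically named permission_<p> flag looked up with getattr, then the default permission names. A toy sketch with a plain object standing in for the User model (FakeUser and the permission names are invented for illustration):

    class FakeUser:
        # Stand-ins for the real permission fields
        is_superuser = False
        permission_library = True
        permission_moderation = False

    user = FakeUser()
    defaults = ["settings"]  # illustrative default permission names

    perms = {}
    for p in ["library", "moderation", "settings"]:
        # The False fallback is only so the toy object does not need every attribute;
        # the real model defines a field per permission.
        perms[p] = user.is_superuser or getattr(user, f"permission_{p}", False) or p in defaults

    assert perms == {"library": True, "moderation": False, "settings": True}
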
@@ -226,7 +219,7 @@ class User(AbstractUser):
def has_permissions(self, *perms, **kwargs):
operator = kwargs.pop("operator", "and")
if operator not in ["and", "or"]:
-raise ValueError("Invalid operator {}".format(operator))
+raise ValueError(f"Invalid operator {operator}")
permissions = self.get_permissions()
checker = all if operator == "and" else any
return checker([permissions[p] for p in perms])
@@ -249,7 +242,7 @@ class User(AbstractUser):
self.update_subsonic_api_token()
def get_activity_url(self):
-return settings.FUNKWHALE_URL + "/@{}".format(self.username)
+return settings.FUNKWHALE_URL + f"/@{self.username}"
def record_activity(self):
"""
@@ -292,16 +285,16 @@ class User(AbstractUser):
def get_channels_groups(self):
groups = ["imports", "inbox"]
-groups = ["user.{}.{}".format(self.pk, g) for g in groups]
+groups = [f"user.{self.pk}.{g}" for g in groups]
for permission, value in self.all_permissions.items():
if value:
-groups.append("admin.{}".format(permission))
+groups.append(f"admin.{permission}")
return groups
def full_username(self) -> str:
-return "{}@{}".format(self.username, settings.FEDERATION_HOSTNAME)
+return f"{self.username}@{settings.FEDERATION_HOSTNAME}"
def get_avatar(self):
if not self.actor:

Show file

@@ -55,12 +55,12 @@ class ScopePermission(permissions.BasePermission):
anonymous_policy = getattr(view, "anonymous_policy", False)
if anonymous_policy not in [True, False, "setting"]:
raise ImproperlyConfigured(
-"{} is not a valid value for anonymous_policy".format(anonymous_policy)
+f"{anonymous_policy} is not a valid value for anonymous_policy"
)
if isinstance(scope_config, str):
scope_config = {
-"read": "read:{}".format(scope_config),
-"write": "write:{}".format(scope_config),
+"read": f"read:{scope_config}",
+"write": f"write:{scope_config}",
}
action = METHOD_SCOPE_MAPPING[request.method.lower()]
required_scope = scope_config[action]
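
When a view declares scope_config as a bare string, the permission class expands it into a read/write pair, and the f-strings simply prefix the scope name. A sketch of what that expansion yields (the "libraries" scope name is only an example):

    scope_config = "libraries"  # example of a view-level scope declared as a plain string

    if isinstance(scope_config, str):
        scope_config = {
            "read": f"read:{scope_config}",
            "write": f"write:{scope_config}",
        }

    # A read-style request would then require the "read" entry, a write-style request the "write" entry.
    assert scope_config == {"read": "read:libraries", "write": "write:libraries"}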

Show file

@@ -5,7 +5,7 @@ class Scope:
self.children = children or []
def copy(self, prefix):
-return Scope("{}:{}".format(prefix, self.id))
+return Scope(f"{prefix}:{self.id}")
BASE_SCOPES = [

Show file

@@ -665,7 +665,7 @@ def test_rss_feed_item_serializer_create(factories):
expected_uuid = uuid.uuid3(
uuid.NAMESPACE_URL,
-"rss://{}-16f66fff-41ae-4a1c-9101-2746218c4f32".format(channel.pk),
+f"rss://{channel.pk}-16f66fff-41ae-4a1c-9101-2746218c4f32",
)
assert upload.library == channel.library
assert upload.import_status == "finished"
@@ -692,7 +692,7 @@ def test_rss_feed_item_serializer_update(factories):
channel = factories["audio.Channel"](rss_url=rss_url, external=True)
expected_uuid = uuid.uuid3(
uuid.NAMESPACE_URL,
-"rss://{}-16f66fff-41ae-4a1c-9101-2746218c4f32".format(channel.pk),
+f"rss://{channel.pk}-16f66fff-41ae-4a1c-9101-2746218c4f32",
)
upload = factories["music.Upload"](
track__uuid=expected_uuid,
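
These tests lean on uuid.uuid3 being deterministic: the same namespace and the same f-string-built name always produce the same UUID, which is why the expected track UUID can be computed before the import runs. A tiny sketch with an arbitrary primary key:

    import uuid

    channel_pk = 7  # arbitrary illustrative value
    name = f"rss://{channel_pk}-16f66fff-41ae-4a1c-9101-2746218c4f32"

    # uuid3 is a pure function of (namespace, name), so repeated calls agree
    assert uuid.uuid3(uuid.NAMESPACE_URL, name) == uuid.uuid3(uuid.NAMESPACE_URL, name)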

Show file

@@ -14,8 +14,8 @@ def test_channel_detail(attribute, spa_html, no_api_auth, client, factories, set
library__privacy_level="everyone", artist__with_cover=True
)
factories["music.Upload"](playable=True, library=channel.library)
-url = "/channels/{}".format(utils.recursive_getattr(channel, attribute))
-detail_url = "/channels/{}".format(channel.actor.full_username)
+url = f"/channels/{utils.recursive_getattr(channel, attribute)}"
+detail_url = f"/channels/{channel.actor.full_username}"
response = client.get(url)
@@ -44,7 +44,7 @@ def test_channel_detail(attribute, spa_html, no_api_auth, client, factories, set
"rel": "alternate",
"type": "application/rss+xml",
"href": channel.get_rss_url(),
-"title": "{} - RSS Podcast Feed".format(channel.artist.name),
+"title": f"{channel.artist.name} - RSS Podcast Feed",
},
{
"tag": "link",
@@ -81,8 +81,8 @@ def test_oembed_channel(factories, no_api_auth, api_client, settings):
channel = factories["audio.Channel"](artist__with_cover=True)
artist = channel.artist
url = reverse("api:v1:oembed")
-obj_url = "https://test.com/channels/{}".format(channel.uuid)
-iframe_src = "http://embed?type=channel&id={}".format(channel.uuid)
+obj_url = f"https://test.com/channels/{channel.uuid}"
+iframe_src = f"http://embed?type=channel&id={channel.uuid}"
expected = {
"version": "1.0",
"type": "rich",

Show file

@@ -442,7 +442,7 @@ def test_can_filter_channels_through_api_scope(factories, logged_in_api_client):
factories["audio.Channel"]()
url = reverse("api:v1:channels-list")
response = logged_in_api_client.get(
-url, {"scope": "actor:{}".format(channel.attributed_to.full_username)}
+url, {"scope": f"actor:{channel.attributed_to.full_username}"}
)
assert response.status_code == 200

Show file

@@ -34,7 +34,7 @@ def test_should_verify_email(
def test_app_token_authentication(factories, api_request):
user = factories["users.User"]()
app = factories["users.Application"](user=user, scope="read write")
-request = api_request.get("/", HTTP_AUTHORIZATION="Bearer {}".format(app.token))
+request = api_request.get("/", HTTP_AUTHORIZATION=f"Bearer {app.token}")
auth = authentication.ApplicationTokenAuthentication()
assert auth.authenticate(request)[0] == app.user

Some files were not shown because too many files have changed in this diff.