Fix warnings from flake8-comprehensions.

pull/7965/head
Mads Jensen 2022-02-10 09:37:32 +01:00 committed by Dan Braghis
parent a0ef2477a6
commit de3fcba9e9
51 changed files with 222 additions and 221 deletions
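
For quick reference, a minimal sketch of the comprehension rewrites that flake8-comprehensions prompts and that this commit applies across the files below. The variable names and sample values are hypothetical illustrations, not code from the commit (only the project_name/github_url pair echoes the first file in the diff).

# Calls to dict()/tuple() with keyword arguments or no arguments become literals.
html_theme_options = {
    "project_name": "Wagtail Documentation",
    "github_url": "https://github.com/wagtail/wagtail/blob/main/docs/",
}
extras = {}   # instead of extras = dict()
urls = ()     # instead of urls = tuple()

# set([...]) becomes a set literal.
required_fields = {'title', 'date_from', 'date_to'}   # instead of set(['title', 'date_from', 'date_to'])

# A generator expression wrapped in set() becomes a set comprehension.
pages = [{"id": 5}, {"id": 16}, {"id": 16}]
page_id_set = {page["id"] for page in pages}   # instead of set(page["id"] for page in pages)

# dict([(key, value) for ...]) becomes a dict comprehension.
values = {"a": 1, "b": 2}
prepped = {name: val * 2 for name, val in values.items()}

# list(sorted(...)) drops the redundant list() call; sorted() already returns a list.
boosts = sorted([0.5, 2.0, 1.0], reverse=True)

# An identity comprehension such as [x for x in y] becomes list(y).
field_names = list(values)   # instead of [field_name for field_name in values]

assert page_id_set == {5, 16}
assert field_names == ["a", "b"]
assert boosts == [2.0, 1.0, 0.5]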

View file

@ -31,10 +31,10 @@ on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
html_theme = 'sphinx_wagtail_theme'
html_theme_path = [sphinx_wagtail_theme.get_html_theme_path()]
html_theme_options = dict(
project_name="Wagtail Documentation",
github_url="https://github.com/wagtail/wagtail/blob/main/docs/"
)
html_theme_options = {
"project_name": "Wagtail Documentation",
"github_url": "https://github.com/wagtail/wagtail/blob/main/docs/"
}
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the

View file

@ -717,7 +717,7 @@ class TestPasswordReset(TestCase, WagtailTestUtils):
# Create url_args
token = auth_views.PasswordResetConfirmView.reset_url_token
self.url_kwargs = dict(uidb64=self.password_reset_uid, token=token)
self.url_kwargs = {"uidb64": self.password_reset_uid, "token": token}
# Add token to session object
s = self.client.session
@ -733,7 +733,7 @@ class TestPasswordReset(TestCase, WagtailTestUtils):
self.setup_password_reset_confirm_tests()
# Create invalid url_args
self.url_kwargs = dict(uidb64=self.password_reset_uid, token="invalid-token")
self.url_kwargs = {"uidb64": self.password_reset_uid, "token": "invalid-token"}
# Get password reset confirm page
response = self.client.get(reverse('wagtailadmin_password_reset_confirm', kwargs=self.url_kwargs))

View file

@ -351,7 +351,7 @@ class TestTabbedInterface(TestCase):
def test_required_fields(self):
# required_fields should report the set of form fields to be rendered recursively by children of TabbedInterface
result = set(self.event_page_tabbed_interface.required_fields())
self.assertEqual(result, set(['title', 'date_from', 'date_to']))
self.assertEqual(result, {'title', 'date_from', 'date_to'})
def test_render_form_content(self):
EventPageForm = self.event_page_tabbed_interface.get_form_class()

View file

@ -204,7 +204,7 @@ class TestSetPrivacyView(TestCase, WagtailTestUtils):
# Check that the groups are set correctly
self.assertEqual(
set(PageViewRestriction.objects.get(page=self.public_page).groups.all()),
set([self.group, self.group2])
{self.group, self.group2}
)
def test_set_group_restriction_password_unset(self):

View file

@ -1779,7 +1779,7 @@ class TestWorkflowUsageView(TestCase, WagtailTestUtils):
self.assertEqual(response.status_code, 200)
object_set = set(page.id for page in response.context['used_by'].object_list)
object_set = {page.id for page in response.context['used_by'].object_list}
self.assertIn(self.root_page.id, object_set)
self.assertIn(self.home_page.id, object_set)
self.assertNotIn(self.child_page_with_another_workflow.id, object_set)

View file

@ -5,7 +5,7 @@ class BaseItem:
template = 'wagtailadmin/userbar/item_base.html'
def render(self, request):
return render_to_string(self.template, dict(self=self, request=request), request=request)
return render_to_string(self.template, {"self": self, "request": request}, request=request)
class AdminItem(BaseItem):

View file

@ -211,7 +211,7 @@ def account(request):
panels = [panel for panel in panels if panel.is_active()]
# Get tabs and order them
tabs = list(set(panel.tab for panel in panels))
tabs = list({panel.tab for panel in panels})
tabs.sort(key=lambda tab: tab.order)
# Get dict of tabs to ordered panels

View file

@ -26,7 +26,7 @@ class BulkAction(ABC, FormView):
def aria_label(self):
pass
extras = dict()
extras = {}
action_priority = 100
models = []
classes = set()

View file

@ -4,7 +4,7 @@ from wagtail.core import hooks
class BulkActionRegistry:
def __init__(self):
self.actions = dict() # {app_name: {model_name: {action_name: action_class]}}
self.actions = {} # {app_name: {model_name: {action_name: action_class]}}
self.has_scanned_for_bulk_actions = False
def _scan_for_bulk_actions(self):

View file

@ -326,7 +326,7 @@ class TestDocumentListing(TestCase):
document_id_list = self.get_document_id_list(content)
self.assertEqual(set(document_id_list), set([2]))
self.assertEqual(set(document_id_list), {2})
def test_search_with_order(self):
response = self.get_response(search='james', order='title')

View file

@ -326,7 +326,7 @@ class TestImageListing(TestCase):
image_id_list = self.get_image_id_list(content)
self.assertEqual(set(image_id_list), set([5]))
self.assertEqual(set(image_id_list), {5})
def test_search_with_order(self):
response = self.get_response(search='james', order='title')

View file

@ -855,7 +855,7 @@ class TestPageListing(TestCase, WagtailTestUtils):
page_id_list = self.get_page_id_list(content)
# Check that the items are the blog index and three blog pages
self.assertEqual(set(page_id_list), set([5, 16, 18, 19]))
self.assertEqual(set(page_id_list), {5, 16, 18, 19})
def test_search_with_type(self):
response = self.get_response(type='demosite.BlogEntryPage', search='blog')
@ -863,7 +863,7 @@ class TestPageListing(TestCase, WagtailTestUtils):
page_id_list = self.get_page_id_list(content)
self.assertEqual(set(page_id_list), set([16, 18, 19]))
self.assertEqual(set(page_id_list), {16, 18, 19})
def test_search_with_filter(self):
response = self.get_response(title="Another blog post", search='blog', order='title')
@ -920,7 +920,7 @@ class TestPageListing(TestCase, WagtailTestUtils):
page_id_list = self.get_page_id_list(content)
self.assertEqual(set(page_id_list), set([18]))
self.assertEqual(set(page_id_list), {18})
def test_search_operator_or(self):
response = self.get_response(type='demosite.BlogEntryPage', search='blog again', search_operator='or')
@ -928,7 +928,7 @@ class TestPageListing(TestCase, WagtailTestUtils):
page_id_list = self.get_page_id_list(content)
self.assertEqual(set(page_id_list), set([16, 18, 19]))
self.assertEqual(set(page_id_list), {16, 18, 19})
def test_empty_searches_work(self):
response = self.get_response(search='')

View file

@ -224,7 +224,7 @@ class SubmissionsListView(SpreadsheetExportMixin, SafePaginateListView):
def get_validated_ordering(self):
""" Return a dict of field names with ordering labels if ordering is valid """
orderable_fields = self.orderable_fields or ()
ordering = dict()
ordering = {}
if self.is_export:
# Revert to CSV order_by submit_time ascending for backwards compatibility
default_ordering = self.ordering_csv or ()
@ -252,7 +252,7 @@ class SubmissionsListView(SpreadsheetExportMixin, SafePaginateListView):
def get_filtering(self):
""" Return filering as a dict for submissions queryset """
self.select_date_form = SelectDateForm(self.request.GET)
result = dict()
result = {}
if self.select_date_form.is_valid():
date_from = self.select_date_form.cleaned_data.get('date_from')
date_to = self.select_date_form.cleaned_data.get('date_to')

View file

@ -35,7 +35,7 @@ class TestBackendConfiguration(TestCase):
},
})
self.assertEqual(set(backends.keys()), set(['varnish']))
self.assertEqual(set(backends.keys()), {'varnish'})
self.assertIsInstance(backends['varnish'], HTTPBackend)
self.assertEqual(backends['varnish'].cache_scheme, 'http')
@ -52,7 +52,7 @@ class TestBackendConfiguration(TestCase):
},
})
self.assertEqual(set(backends.keys()), set(['cloudflare']))
self.assertEqual(set(backends.keys()), {'cloudflare'})
self.assertIsInstance(backends['cloudflare'], CloudflareBackend)
self.assertEqual(backends['cloudflare'].cloudflare_email, 'test@test.com')
@ -67,7 +67,7 @@ class TestBackendConfiguration(TestCase):
},
})
self.assertEqual(set(backends.keys()), set(['cloudfront']))
self.assertEqual(set(backends.keys()), {'cloudfront'})
self.assertIsInstance(backends['cloudfront'], CloudfrontBackend)
self.assertEqual(backends['cloudfront'].cloudfront_distribution_id, 'frontend')
@ -82,7 +82,7 @@ class TestBackendConfiguration(TestCase):
},
})
self.assertEqual(set(backends.keys()), set(['azure_cdn']))
self.assertEqual(set(backends.keys()), {'azure_cdn'})
self.assertIsInstance(backends['azure_cdn'], AzureCdnBackend)
self.assertEqual(backends['azure_cdn']._resource_group_name, 'test-resource-group')
self.assertEqual(backends['azure_cdn']._cdn_profile_name, 'wagtail-io-profile')
@ -97,7 +97,7 @@ class TestBackendConfiguration(TestCase):
},
})
self.assertEqual(set(backends.keys()), set(['azure_front_door']))
self.assertEqual(set(backends.keys()), {'azure_front_door'})
self.assertIsInstance(backends['azure_front_door'], AzureFrontDoorBackend)
self.assertEqual(backends['azure_front_door']._resource_group_name, 'test-resource-group')
self.assertEqual(backends['azure_front_door']._front_door_name, 'wagtail-io-front-door')
@ -114,7 +114,7 @@ class TestBackendConfiguration(TestCase):
'CREDENTIALS': mock_credentials,
},
})
self.assertEqual(set(backends.keys()), set(['azure_cdn']))
self.assertEqual(set(backends.keys()), {'azure_cdn'})
client = backends['azure_cdn']._get_client()
self.assertIsInstance(client, CdnManagementClient)
self.assertEqual(client.config.subscription_id, 'fake-subscription-id')
@ -132,7 +132,7 @@ class TestBackendConfiguration(TestCase):
},
})
client = backends['azure_front_door']._get_client()
self.assertEqual(set(backends.keys()), set(['azure_front_door']))
self.assertEqual(set(backends.keys()), {'azure_front_door'})
self.assertIsInstance(client, FrontDoorManagementClient)
self.assertEqual(client.config.subscription_id, 'fake-subscription-id')
self.assertIs(client.config.credentials, mock_credentials)
@ -149,7 +149,7 @@ class TestBackendConfiguration(TestCase):
},
})
self.assertEqual(set(backends.keys()), set(['azure_cdn']))
self.assertEqual(set(backends.keys()), {'azure_cdn'})
self.assertIsInstance(backends['azure_cdn'], AzureCdnBackend)
# purge()
@ -181,7 +181,7 @@ class TestBackendConfiguration(TestCase):
},
})
self.assertEqual(set(backends.keys()), set(['azure_front_door']))
self.assertEqual(set(backends.keys()), {'azure_front_door'})
self.assertIsInstance(backends['azure_front_door'], AzureFrontDoorBackend)
# purge()
@ -242,7 +242,7 @@ class TestBackendConfiguration(TestCase):
'LOCATION': 'http://localhost:8000',
},
})
self.assertEqual(set(backends.keys()), set(['varnish']))
self.assertEqual(set(backends.keys()), {'varnish'})
self.assertIsInstance(backends['varnish'], HTTPBackend)
# and mocked urlopen that may or may not raise network-related exception
urlopen_mock.side_effect = urlopen_side_effect
@ -293,7 +293,7 @@ class TestBackendConfiguration(TestCase):
}
})
self.assertEqual(set(backends.keys()), set(['varnish', 'cloudflare']))
self.assertEqual(set(backends.keys()), {'varnish', 'cloudflare'})
def test_filter(self):
backends = get_backends(backend_settings={
@ -309,13 +309,13 @@ class TestBackendConfiguration(TestCase):
}
}, backends=['cloudflare'])
self.assertEqual(set(backends.keys()), set(['cloudflare']))
self.assertEqual(set(backends.keys()), {'cloudflare'})
@override_settings(WAGTAILFRONTENDCACHE_LOCATION='http://localhost:8000')
def test_backwards_compatibility(self):
backends = get_backends()
self.assertEqual(set(backends.keys()), set(['default']))
self.assertEqual(set(backends.keys()), {'default'})
self.assertIsInstance(backends['default'], HTTPBackend)
self.assertEqual(backends['default'].cache_scheme, 'http')
self.assertEqual(backends['default'].cache_netloc, 'localhost:8000')

View file

@ -687,7 +687,7 @@ class ModelAdminGroup(WagtailRegisterable):
Utilised by Wagtail's 'register_admin_urls' hook to register urls for
used by any associated ModelAdmin instances
"""
urls = tuple()
urls = ()
for instance in self.modeladmin_instances:
urls += instance.get_admin_urls_for_registration()
return urls

View file

@ -22,7 +22,7 @@ class TestExtractPanelDefinitionsFromModelAdmin(TestCase, WagtailTestUtils):
which have been defined via model Person.edit_handler"""
response = self.client.get('/admin/modeladmintest/person/create/')
self.assertEqual(
[field_name for field_name in response.context['form'].fields],
list(response.context['form'].fields),
['first_name', 'last_name', 'phone_number']
)
@ -53,7 +53,7 @@ class TestExtractPanelDefinitionsFromModelAdmin(TestCase, WagtailTestUtils):
which have been defined via model Friend.panels"""
response = self.client.get('/admin/modeladmintest/friend/create/')
self.assertEqual(
[field_name for field_name in response.context['form'].fields],
list(response.context['form'].fields),
['first_name', 'phone_number']
)
@ -62,7 +62,7 @@ class TestExtractPanelDefinitionsFromModelAdmin(TestCase, WagtailTestUtils):
which have been defined via model VisitorAdmin.edit_handler"""
response = self.client.get('/admin/modeladmintest/visitor/create/')
self.assertEqual(
[field_name for field_name in response.context['form'].fields],
list(response.context['form'].fields),
['last_name', 'phone_number', 'address']
)
@ -71,7 +71,7 @@ class TestExtractPanelDefinitionsFromModelAdmin(TestCase, WagtailTestUtils):
which have been defined via model ContributorAdmin.panels"""
response = self.client.get('/admin/modeladmintest/contributor/create/')
self.assertEqual(
[field_name for field_name in response.context['form'].fields],
list(response.context['form'].fields),
['last_name', 'phone_number', 'address']
)
@ -86,7 +86,7 @@ class TestExtractPanelDefinitionsFromModelAdmin(TestCase, WagtailTestUtils):
form_class = edit_handler.get_form_class()
form = form_class()
self.assertEqual(
[field_name for field_name in form.fields],
list(form.fields),
['first_name', 'last_name', 'phone_number']
)
@ -104,7 +104,7 @@ class TestExtractPanelDefinitionsFromModelAdmin(TestCase, WagtailTestUtils):
form_class = edit_handler.get_form_class()
form = form_class()
self.assertEqual(
[field_name for field_name in form.fields],
list(form.fields),
['last_name', 'phone_number', 'address']
)
@ -126,6 +126,6 @@ class TestExtractPanelDefinitionsFromModelAdmin(TestCase, WagtailTestUtils):
form_class = edit_handler.get_form_class()
form = form_class()
self.assertEqual(
[field_name for field_name in form.fields],
list(form.fields),
['phone_number', 'address']
)

View file

@ -88,14 +88,14 @@ class TestSearchBackendHandler(TestCase):
queryset = self.get_queryset()
search_kwargs = search_handler.search_queryset(queryset, 'test')
self.assertTrue(mocked_method.called)
self.assertEqual(search_kwargs, dict(
query='test',
model_or_queryset=queryset,
fields=None,
operator=None,
order_by_relevance=True,
partial_match=True,
))
self.assertEqual(search_kwargs, {
"query": 'test',
"model_or_queryset": queryset,
"fields": None,
"operator": None,
"order_by_relevance": True,
"partial_match": True,
})
@patch('wagtail.contrib.modeladmin.helpers.search.get_search_backend', return_value=FakeSearchBackend())
def test_search_queryset_with_search_fields(self, mocked_method):
@ -106,14 +106,14 @@ class TestSearchBackendHandler(TestCase):
queryset = self.get_queryset()
search_kwargs = search_handler.search_queryset(queryset, 'test')
self.assertTrue(mocked_method.called)
self.assertEqual(search_kwargs, dict(
query='test',
model_or_queryset=queryset,
fields=search_fields,
operator=None,
order_by_relevance=True,
partial_match=True,
))
self.assertEqual(search_kwargs, {
"query": 'test',
"model_or_queryset": queryset,
"fields": search_fields,
"operator": None,
"order_by_relevance": True,
"partial_match": True,
})
@patch('wagtail.contrib.modeladmin.helpers.search.get_search_backend', return_value=FakeSearchBackend())
def test_search_queryset_preserve_order(self, get_search_backend):
@ -121,14 +121,14 @@ class TestSearchBackendHandler(TestCase):
queryset = self.get_queryset()
search_kwargs = search_handler.search_queryset(queryset, 'Lord', preserve_order=True)
self.assertEqual(search_kwargs, dict(
query='Lord',
model_or_queryset=queryset,
fields=None,
operator=None,
order_by_relevance=False,
partial_match=True,
))
self.assertEqual(search_kwargs, {
"query": 'Lord',
"model_or_queryset": queryset,
"fields": None,
"operator": None,
"order_by_relevance": False,
"partial_match": True,
})
def test_show_search_form(self):
search_handler = self.get_search_handler(search_fields=None)

View file

@ -246,7 +246,7 @@ class TestCreateView(TestCase, WagtailTestUtils):
self.assertIn('name', response.content.decode('UTF-8'))
self.assertNotIn('headquartered_in', response.content.decode('UTF-8'))
self.assertEqual(
[ii for ii in response.context['form'].fields],
list(response.context['form'].fields),
['name']
)
self.client.post('/admin/modeladmintest/publisher/create/', {

View file

@ -16,6 +16,6 @@ class Migration(migrations.Migration):
),
migrations.AlterUniqueTogether(
name='redirect',
unique_together=set([('old_path', 'site')]),
unique_together={('old_path', 'site')},
),
]

View file

@ -73,9 +73,9 @@ def autocreate_redirects_on_page_move(
# This value is used to prevent creation redirects that link
# from one site to another
new_site_ids = set(
new_site_ids = {
item.site_id for item in instance._get_relevant_site_root_paths(cache_object=instance)
)
}
# Determine sites to create redirects for
sites = Site.objects.exclude(root_page=instance).filter(id__in=[

View file

@ -60,7 +60,7 @@ class TestAutocreateRedirects(TestCase, WagtailTestUtils):
# gather all of the redirects that were created
redirects = Redirect.objects.all()
redirect_page_ids = set(r.redirect_page_id for r in redirects)
redirect_page_ids = {r.redirect_page_id for r in redirects}
# a redirect should have been created for the page itself
self.assertIn(test_subject.id, redirect_page_ids)

View file

@ -68,7 +68,7 @@ def index(request):
'redirects': redirects,
'query_string': query_string,
'search_form': SearchForm(
data=dict(q=query_string) if query_string else None, placeholder=_("Search redirects")
data={"q": query_string} if query_string else None, placeholder=_("Search redirects")
),
'user_can_add': permission_policy.user_has_permission(request.user, 'add'),
})
@ -173,7 +173,7 @@ def start_import(request):
"wagtailredirects/choose_import_file.html",
{
'search_form': SearchForm(
data=dict(q=query_string) if query_string else None, placeholder=_("Search redirects")
data={"q": query_string} if query_string else None, placeholder=_("Search redirects")
),
"form": form,
},

View file

@ -19,7 +19,7 @@ class SearchPromotionForm(forms.ModelForm):
fields = ('query', 'page', 'description')
widgets = {
'description': forms.Textarea(attrs=dict(rows=3)),
'description': forms.Textarea(attrs={"rows": 3}),
}

View file

@ -64,7 +64,7 @@ def index(request):
'queries': queries,
'query_string': query_string,
'search_form': SearchForm(
data=dict(q=query_string) if query_string else None, placeholder=_("Search promoted results")
data={"q": query_string} if query_string else None, placeholder=_("Search promoted results")
),
})
@ -175,7 +175,7 @@ def edit(request, query_id):
# specific errors will be displayed within form fields
else:
query_form = search_forms.QueryForm(initial=dict(query_string=query.query_string))
query_form = search_forms.QueryForm(initial={"query_string": query.query_string})
searchpicks_formset = forms.SearchPromotionsFormSet(instance=query)
return TemplateResponse(request, 'wagtailsearchpromotions/edit.html', {

View file

@ -189,23 +189,23 @@ class BaseStructBlock(Block):
def get_prep_value(self, value):
""" Recursively call get_prep_value on children and return as a plain dict """
return dict([
(name, self.child_blocks[name].get_prep_value(val))
return {
name: self.child_blocks[name].get_prep_value(val)
for name, val in value.items()
])
}
def get_form_state(self, value):
return dict([
(name, self.child_blocks[name].get_form_state(val))
return {
name: self.child_blocks[name].get_form_state(val)
for name, val in value.items()
])
}
def get_api_representation(self, value, context=None):
""" Recursively call get_api_representation on children and return as a plain dict """
return dict([
(name, self.child_blocks[name].get_api_representation(val, context=context))
return {
name: self.child_blocks[name].get_api_representation(val, context=context)
for name, val in value.items()
])
}
def get_searchable_content(self, value):
content = []

View file

@ -153,7 +153,7 @@ class Migration(migrations.Migration):
),
migrations.AlterUniqueTogether(
name='site',
unique_together=set([('hostname', 'port')]),
unique_together={('hostname', 'port')},
),
migrations.AddField(
model_name='grouppagepermission',

View file

@ -266,7 +266,7 @@ class Migration(migrations.Migration):
),
migrations.AlterUniqueTogether(
name='grouppagepermission',
unique_together=set([('group', 'page', 'permission_type')]),
unique_together={('group', 'page', 'permission_type')},
),
migrations.AlterModelOptions(
name='grouppagepermission',
@ -342,7 +342,7 @@ class Migration(migrations.Migration):
),
migrations.AlterUniqueTogether(
name='site',
unique_together=set([('hostname', 'port')]),
unique_together={('hostname', 'port')},
),
migrations.AlterModelOptions(
name='site',

View file

@ -23,6 +23,6 @@ class Migration(migrations.Migration):
),
migrations.AlterUniqueTogether(
name='grouppagepermission',
unique_together=set([('group', 'page', 'permission_type')]),
unique_together={('group', 'page', 'permission_type')},
),
]

View file

@ -25,6 +25,6 @@ class Migration(migrations.Migration):
),
migrations.AlterUniqueTogether(
name='groupcollectionpermission',
unique_together=set([('group', 'collection', 'permission')]),
unique_together={('group', 'collection', 'permission')},
),
]

View file

@ -1209,7 +1209,7 @@ class Page(AbstractPage, index.Indexed, ClusterableModel, metaclass=PageBase):
# Get number of unique sites in root paths
# Note: there may be more root paths to sites if there are multiple languages
num_sites = len(set(root_path[0] for root_path in self._get_site_root_paths(request)))
num_sites = len({root_path[0] for root_path in self._get_site_root_paths(request)})
if (current_site is not None and site_id == current_site.id) or num_sites == 1:
# the site matches OR we're only running a single site, so a local URL is sufficient
@ -2275,10 +2275,10 @@ class PagePermissionTester:
self.page_is_root = page.depth == 1 # Equivalent to page.is_root()
if self.user.is_active and not self.user.is_superuser:
self.permissions = set(
self.permissions = {
perm.permission_type for perm in user_perms.permissions
if self.page.path.startswith(perm.page.path)
)
}
def user_has_lock(self):
return self.page.locked_by_id == self.user.pk
@ -3411,7 +3411,7 @@ class PageLogEntryQuerySet(LogEntryQuerySet):
# for reporting purposes, pages of all types are combined under a single "Page"
# object type
if self.exists():
return set([ContentType.objects.get_for_model(Page).pk])
return {ContentType.objects.get_for_model(Page).pk}
else:
return set()

View file

@ -195,10 +195,10 @@ class PageQuerySet(SearchableQuerySetMixin, TreeQuerySet):
return self.exclude(self.page_q(other))
def type_q(self, *types):
all_subclasses = set(
all_subclasses = {
model for model in apps.get_models()
if issubclass(model, types)
)
}
content_types = ContentType.objects.get_for_models(*all_subclasses)
return Q(content_type__in=list(content_types.values()))

View file

@ -13,17 +13,17 @@ from django.test import TestCase
class TestForMigrations(TestCase):
def test__migrations(self):
app_labels = set(app.label for app in apps.get_app_configs()
if app.name.startswith('wagtail.'))
app_labels = {app.label for app in apps.get_app_configs()
if app.name.startswith('wagtail.')}
for app_label in app_labels:
apps.get_app_config(app_label.split('.')[-1])
loader = MigrationLoader(None, ignore_no_migrations=True)
conflicts = dict(
conflicts = {
(app_label, conflict)
for app_label, conflict in loader.detect_conflicts().items()
if app_label in app_labels
)
}
if conflicts:
name_str = "; ".join("%s in %s" % (", ".join(names), app)

View file

@ -1265,7 +1265,7 @@ class TestCopyPage(TestCase):
# Also, check that the child objects in the new revision are given new IDs
old_speakers_ids = set(christmas_event.speakers.values_list('id', flat=True))
new_speakers_ids = set(speaker['pk'] for speaker in new_revision_content['speakers'])
new_speakers_ids = {speaker['pk'] for speaker in new_revision_content['speakers']}
self.assertFalse(
old_speakers_ids.intersection(new_speakers_ids),
"Child objects in revisions were not given a new primary key"

View file

@ -36,6 +36,6 @@ class Migration(migrations.Migration):
),
migrations.AlterUniqueTogether(
name='embed',
unique_together=set([('url', 'max_width')]),
unique_together={('url', 'max_width')},
),
]

View file

@ -71,6 +71,6 @@ class Migration(migrations.Migration):
),
migrations.AlterUniqueTogether(
name='rendition',
unique_together=set([('image', 'filter', 'focal_point_key')]),
unique_together={('image', 'filter', 'focal_point_key')},
),
]

View file

@ -51,6 +51,6 @@ class Migration(migrations.Migration):
migrations.AlterUniqueTogether(
name='rendition',
unique_together=set([('image', 'filter_spec', 'focal_point_key')]),
unique_together={('image', 'filter_spec', 'focal_point_key')},
),
]

View file

@ -648,7 +648,7 @@ class AbstractRendition(ImageFileMixin, models.Model):
errors = super(AbstractRendition, cls).check(**kwargs)
if not cls._meta.abstract:
if not any(
set(constraint) == set(['image', 'filter_spec', 'focal_point_key'])
set(constraint) == {'image', 'filter_spec', 'focal_point_key'}
for constraint in cls._meta.unique_together
):
errors.append(

View file

@ -2115,7 +2115,7 @@ class TestGenerateURLView(TestCase, WagtailTestUtils):
# Check JSON
content_json = json.loads(response.content.decode())
self.assertEqual(set(content_json.keys()), set(['url', 'preview_url']))
self.assertEqual(set(content_json.keys()), {'url', 'preview_url'})
expected_url = 'http://localhost/images/%(signature)s/%(image_id)d/fill-800x600/' % {
'signature': urllib.parse.quote(generate_signature(self.image.id, 'fill-800x600'), safe=urlquote_safechars),

View file

@ -152,13 +152,13 @@ class TestFillOperation(ImageTransformOperationTestCase):
operation_class = image_operations.FillOperation
filter_spec_tests = [
('fill-800x600', dict(width=800, height=600, crop_closeness=0)),
('hello-800x600', dict(width=800, height=600, crop_closeness=0)),
('fill-800x600-c0', dict(width=800, height=600, crop_closeness=0)),
('fill-800x600-c100', dict(width=800, height=600, crop_closeness=1)),
('fill-800x600-c50', dict(width=800, height=600, crop_closeness=0.5)),
('fill-800x600-c1000', dict(width=800, height=600, crop_closeness=1)),
('fill-800000x100', dict(width=800000, height=100, crop_closeness=0)),
('fill-800x600', {"width": 800, "height": 600, "crop_closeness": 0}),
('hello-800x600', {"width": 800, "height": 600, "crop_closeness": 0}),
('fill-800x600-c0', {"width": 800, "height": 600, "crop_closeness": 0}),
('fill-800x600-c100', {"width": 800, "height": 600, "crop_closeness": 1}),
('fill-800x600-c50', {"width": 800, "height": 600, "crop_closeness": 0.5}),
('fill-800x600-c1000', {"width": 800, "height": 600, "crop_closeness": 1}),
('fill-800000x100', {"width": 800000, "height": 100, "crop_closeness": 0}),
]
filter_spec_error_tests = [
@ -173,33 +173,33 @@ class TestFillOperation(ImageTransformOperationTestCase):
run_tests = [
# Basic usage
('fill-800x600', dict(width=1000, height=1000), [
('fill-800x600', {"width": 1000, "height": 1000}, [
('crop', (0, 125, 1000, 875)),
('resize', (800, 600)),
]),
# Basic usage with an oddly-sized original image
# This checks for a rounding precision issue (#968)
('fill-200x200', dict(width=539, height=720), [
('fill-200x200', {"width": 539, "height": 720}, [
('crop', (0, 90, 539, 630)),
('resize', (200, 200)),
]),
# Closeness shouldn't have any effect when used without a focal point
('fill-800x600-c100', dict(width=1000, height=1000), [
('fill-800x600-c100', {"width": 1000, "height": 1000}, [
('crop', (0, 125, 1000, 875)),
('resize', (800, 600)),
]),
# Should always crop towards focal point. Even if no closeness is set
('fill-80x60', dict(
width=1000,
height=1000,
focal_point_x=1000,
focal_point_y=500,
focal_point_width=0,
focal_point_height=0,
), [
('fill-80x60', {
"width": 1000,
"height": 1000,
"focal_point_x": 1000,
"focal_point_y": 500,
"focal_point_width": 0,
"focal_point_height": 0,
}, [
# Crop the largest possible crop box towards the focal point
('crop', (0, 125, 1000, 875)),
@ -208,14 +208,14 @@ class TestFillOperation(ImageTransformOperationTestCase):
]),
# Should crop as close as possible without upscaling
('fill-80x60-c100', dict(
width=1000,
height=1000,
focal_point_x=1000,
focal_point_y=500,
focal_point_width=0,
focal_point_height=0,
), [
('fill-80x60-c100', {
"width": 1000,
"height": 1000,
"focal_point_x": 1000,
"focal_point_y": 500,
"focal_point_width": 0,
"focal_point_height": 0,
}, [
# Crop as close as possible to the focal point
('crop', (920, 470, 1000, 530)),
@ -224,27 +224,27 @@ class TestFillOperation(ImageTransformOperationTestCase):
# Ditto with a wide image
# Using a different filter so method name doesn't clash
('fill-100x60-c100', dict(
width=2000,
height=1000,
focal_point_x=2000,
focal_point_y=500,
focal_point_width=0,
focal_point_height=0,
), [
('fill-100x60-c100', {
"width": 2000,
"height": 1000,
"focal_point_x": 2000,
"focal_point_y": 500,
"focal_point_width": 0,
"focal_point_height": 0,
}, [
# Crop to the right hand side
('crop', (1900, 470, 2000, 530)),
]),
# Make sure that the crop box never enters the focal point
('fill-50x50-c100', dict(
width=2000,
height=1000,
focal_point_x=1000,
focal_point_y=500,
focal_point_width=100,
focal_point_height=20,
), [
('fill-50x50-c100', {
"width": 2000,
"height": 1000,
"focal_point_x": 1000,
"focal_point_y": 500,
"focal_point_width": 100,
"focal_point_height": 20,
}, [
# Crop a 100x100 box around the entire focal point
('crop', (950, 450, 1050, 550)),
@ -253,19 +253,19 @@ class TestFillOperation(ImageTransformOperationTestCase):
]),
# Test that the image is never upscaled
('fill-1000x800', dict(width=100, height=100), [
('fill-1000x800', {"width": 100, "height": 100}, [
('crop', (0, 10, 100, 90)),
]),
# Test that the crop closeness gets capped to prevent upscaling
('fill-1000x800-c100', dict(
width=1500,
height=1000,
focal_point_x=750,
focal_point_y=500,
focal_point_width=0,
focal_point_height=0,
), [
('fill-1000x800-c100', {
"width": 1500,
"height": 1000,
"focal_point_x": 750,
"focal_point_y": 500,
"focal_point_width": 0,
"focal_point_height": 0,
}, [
# Crop a 1000x800 square out of the image as close to the
# focal point as possible. Will not zoom too far in to
# prevent upscaling
@ -275,14 +275,14 @@ class TestFillOperation(ImageTransformOperationTestCase):
# Test for an issue where a ZeroDivisionError would occur when the
# focal point size, image size and filter size match
# See: #797
('fill-1500x1500-c100', dict(
width=1500,
height=1500,
focal_point_x=750,
focal_point_y=750,
focal_point_width=1500,
focal_point_height=1500,
), [
('fill-1500x1500-c100', {
"width": 1500,
"height": 1500,
"focal_point_x": 750,
"focal_point_y": 750,
"focal_point_width": 1500,
"focal_point_height": 1500,
}, [
# This operation could probably be optimised out
('crop', (0, 0, 1500, 1500)),
]),
@ -290,25 +290,25 @@ class TestFillOperation(ImageTransformOperationTestCase):
# A few tests for single pixel images
('fill-100x100', dict(
width=1,
height=1,
), [
('fill-100x100', {
"width": 1,
"height": 1,
}, [
('crop', (0, 0, 1, 1)),
]),
# This one once gave a ZeroDivisionError
('fill-100x150', dict(
width=1,
height=1,
), [
('fill-100x150', {
"width": 1,
"height": 1,
}, [
('crop', (0, 0, 1, 1)),
]),
('fill-150x100', dict(
width=1,
height=1,
), [
('fill-150x100', {
"width": 1,
"height": 1,
}, [
('crop', (0, 0, 1, 1)),
]),
]
@ -321,8 +321,8 @@ class TestMinMaxOperation(ImageTransformOperationTestCase):
operation_class = image_operations.MinMaxOperation
filter_spec_tests = [
('min-800x600', dict(method='min', width=800, height=600)),
('max-800x600', dict(method='max', width=800, height=600)),
('min-800x600', {"method": 'min', "width": 800, "height": 600}),
('max-800x600', {"method": 'max', "width": 800, "height": 600}),
]
filter_spec_error_tests = [
@ -337,19 +337,19 @@ class TestMinMaxOperation(ImageTransformOperationTestCase):
run_tests = [
# Basic usage of min
('min-800x600', dict(width=1000, height=1000), [
('min-800x600', {"width": 1000, "height": 1000}, [
('resize', (800, 800)),
]),
# Basic usage of max
('max-800x600', dict(width=1000, height=1000), [
('max-800x600', {"width": 1000, "height": 1000}, [
('resize', (600, 600)),
]),
# Resize doesn't try to set zero height
('max-400x400', dict(width=1000, height=1), [
('max-400x400', {"width": 1000, "height": 1}, [
('resize', (400, 1)),
]),
# Resize doesn't try to set zero width
('max-400x400', dict(width=1, height=1000), [
('max-400x400', {"width": 1, "height": 1000}, [
('resize', (1, 400)),
]),
]
@ -362,8 +362,8 @@ class TestWidthHeightOperation(ImageTransformOperationTestCase):
operation_class = image_operations.WidthHeightOperation
filter_spec_tests = [
('width-800', dict(method='width', size=800)),
('height-600', dict(method='height', size=600)),
('width-800', {"method": 'width', "size": 800}),
('height-600', {"method": 'height', "size": 600}),
]
filter_spec_error_tests = [
@ -375,19 +375,19 @@ class TestWidthHeightOperation(ImageTransformOperationTestCase):
run_tests = [
# Basic usage of width
('width-400', dict(width=1000, height=500), [
('width-400', {"width": 1000, "height": 500}, [
('resize', (400, 200)),
]),
# Basic usage of height
('height-400', dict(width=1000, height=500), [
('height-400', {"width": 1000, "height": 500}, [
('resize', (800, 400)),
]),
# Resize doesn't try to set zero height
('width-400', dict(width=1000, height=1), [
('width-400', {"width": 1000, "height": 1}, [
('resize', (400, 1)),
]),
# Resize doesn't try to set zero width
('height-400', dict(width=1, height=800), [
('height-400', {"width": 1, "height": 800}, [
('resize', (1, 400)),
]),
]
@ -400,8 +400,8 @@ class TestScaleOperation(ImageTransformOperationTestCase):
operation_class = image_operations.ScaleOperation
filter_spec_tests = [
('scale-100', dict(method='scale', percent=100)),
('scale-50', dict(method='scale', percent=50)),
('scale-100', {"method": 'scale', "percent": 100}),
('scale-50', {"method": 'scale', "percent": 50}),
]
filter_spec_error_tests = [
@ -413,23 +413,23 @@ class TestScaleOperation(ImageTransformOperationTestCase):
run_tests = [
# Basic almost a no-op of scale
('scale-100', dict(width=1000, height=500), [
('scale-100', {"width": 1000, "height": 500}, [
('resize', (1000, 500)),
]),
# Basic usage of scale
('scale-50', dict(width=1000, height=500), [
('scale-50', {"width": 1000, "height": 500}, [
('resize', (500, 250)),
]),
# Rounded usage of scale
('scale-83.0322', dict(width=1000, height=500), [
('scale-83.0322', {"width": 1000, "height": 500}, [
('resize', (int(1000 * 0.830322), int(500 * 0.830322))),
]),
# Resize doesn't try to set zero height
('scale-50', dict(width=1000, height=1), [
('scale-50', {"width": 1000, "height": 1}, [
('resize', (500, 1)),
]),
# Resize doesn't try to set zero width
('scale-50', dict(width=1, height=500), [
('scale-50', {"width": 1, "height": 500}, [
('resize', (1, 250)),
]),
]

View file

@ -51,10 +51,10 @@ class BaseSearchQueryCompiler:
def _get_filterable_field(self, field_attname):
# Get field
field = dict(
(field.get_attname(self.queryset.model), field)
field = {
field.get_attname(self.queryset.model): field
for field in self.queryset.model.get_filterable_search_fields()
).get(field_attname, None)
}.get(field_attname, None)
return field

View file

@ -30,7 +30,7 @@ def get_boosts():
def determine_boosts_weights(boosts=()):
if not boosts:
boosts = get_boosts()
boosts = list(sorted(boosts, reverse=True))
boosts = sorted(boosts, reverse=True)
min_boost = boosts[-1]
if len(boosts) <= WEIGHTS_COUNT:
return list(zip_longest(boosts, WEIGHTS, fillvalue=min(min_boost, 0)))

View file

@ -15,7 +15,7 @@ class BM25(Func):
def __init__(
self
):
expressions = tuple()
expressions = ()
super().__init__(*expressions)
def as_sql(self, compiler: SQLCompiler, connection: BaseDatabaseWrapper, function=None, template=None):

View file

@ -196,9 +196,9 @@ class Elasticsearch5Mapping:
def get_mapping(self):
# Make field list
fields = {
'pk': dict(type=self.keyword_type, store=True, include_in_all=False),
'content_type': dict(type=self.keyword_type, include_in_all=False),
self.edgengrams_field_name: dict(type=self.text_type, include_in_all=False),
'pk': {"type": self.keyword_type, "store": True, "include_in_all": False},
'content_type': {"type": self.keyword_type, "include_in_all": False},
self.edgengrams_field_name: {"type": self.text_type, "include_in_all": False},
}
fields[self.edgengrams_field_name].update(self.edgengram_analyzer_config)
@ -208,9 +208,10 @@ class Elasticsearch5Mapping:
fields['pk']['index'] = 'not_analyzed'
fields['content_type']['index'] = 'not_analyzed'
fields.update(dict(
self.get_field_mapping(field) for field in self.model.get_search_fields()
))
fields.update({
self.get_field_mapping(field)[0]: self.get_field_mapping(field)[1]
for field in self.model.get_search_fields()
})
return {
self.get_document_type(): {
@ -239,7 +240,7 @@ class Elasticsearch5Mapping:
def get_document(self, obj):
# Build document
doc = dict(pk=str(obj.pk), content_type=self.get_all_content_types())
doc = {"pk": str(obj.pk), "content_type": self.get_all_content_types()}
edgengrams = []
for field in self.model.get_search_fields():
value = field.get_value(obj)

View file

@ -46,7 +46,7 @@ class Migration(migrations.Migration):
),
migrations.AlterUniqueTogether(
name='querydailyhits',
unique_together=set([('query', 'date')]),
unique_together={('query', 'date')},
),
migrations.AddField(
model_name='editorspick',

View file

@ -90,7 +90,7 @@ class ElasticsearchCommonSearchBackendTests(BackendTests):
# Even though they both start with "Java", this should not match the "JavaScript" books
results = self.backend.search("JavaBeans", models.Book)
self.assertSetEqual(set(r.title for r in results), set())
self.assertSetEqual({r.title for r in results}, set())
def test_search_with_hyphen(self):
"""

View file

@ -52,7 +52,7 @@ class BackendTests(WagtailTestUtils):
Note: This is different to assertSetEqual in that duplicate results are taken
into account.
"""
self.assertListEqual(list(sorted(a)), list(sorted(b)))
self.assertListEqual(sorted(a), sorted(b))
# SEARCH TESTS
@ -425,7 +425,7 @@ class BackendTests(WagtailTestUtils):
# Offset the results
results = results[3:]
self.assertListEqual(list(r.title for r in results), [
self.assertListEqual([r.title for r in results], [
"The Fellowship of the Ring",
"The Return of the King",
"A Game of Thrones",
@ -531,7 +531,7 @@ class BackendTests(WagtailTestUtils):
# Limit the results
results = results[:3]
self.assertEqual(list(r.title for r in results), [
self.assertEqual([r.title for r in results], [
# "Foundation"
"The Hobbit",
"The Two Towers",

View file

@ -33,7 +33,7 @@ class TestContentTypeNames(TestCase):
class TestSearchFields(TestCase):
def make_dummy_type(self, search_fields):
return type(str('DummyType'), (index.Indexed, ), dict(search_fields=search_fields))
return type(str('DummyType'), (index.Indexed, ), {"search_fields": search_fields})
def get_checks_result(warning_id=None):
"""Run Django checks on any with the 'search' tag used when registering the check"""

View file

@ -26,6 +26,6 @@ class Migration(migrations.Migration):
),
migrations.AlterUniqueTogether(
name='customrendition',
unique_together=set([('image', 'filter_spec', 'focal_point_key')]),
unique_together={('image', 'filter_spec', 'focal_point_key')},
),
]

View file

@ -15,7 +15,7 @@ class WagtailTestUtils:
"""
user_model = get_user_model()
# Create a user
user_data = dict()
user_data = {}
user_data[user_model.USERNAME_FIELD] = 'test@email.com'
user_data['email'] = 'test@email.com'
user_data['password'] = 'password'

View file

@ -20,7 +20,7 @@ from wagtail.core.models import (
User = get_user_model()
# The standard fields each user model is expected to have, as a minimum.
standard_fields = set(['email', 'first_name', 'last_name', 'is_superuser', 'groups'])
standard_fields = {'email', 'first_name', 'last_name', 'is_superuser', 'groups'}
# Custom fields
if hasattr(settings, 'WAGTAIL_USER_CUSTOM_FIELDS'):
custom_fields = set(settings.WAGTAIL_USER_CUSTOM_FIELDS)
@ -172,7 +172,7 @@ class UserForm(UsernameForm):
class UserCreationForm(UserForm):
class Meta:
model = User
fields = set([User.USERNAME_FIELD]) | standard_fields | custom_fields
fields = {User.USERNAME_FIELD} | standard_fields | custom_fields
widgets = {
'groups': forms.CheckboxSelectMultiple
}
@ -191,7 +191,7 @@ class UserEditForm(UserForm):
class Meta:
model = User
fields = set([User.USERNAME_FIELD, "is_active"]) | standard_fields | custom_fields
fields = {User.USERNAME_FIELD, "is_active"} | standard_fields | custom_fields
widgets = {
'groups': forms.CheckboxSelectMultiple
}

View file

@ -1516,7 +1516,7 @@ class TestGroupEditView(TestCase, WagtailTestUtils):
)
self.assertEqual(
set(page_permissions_formset.forms[0]['permission_types'].value()),
set(['add', 'edit'])
{'add', 'edit'}
)
# add edit permission on home
@ -1537,7 +1537,7 @@ class TestGroupEditView(TestCase, WagtailTestUtils):
)
self.assertEqual(
set(page_permissions_formset.forms[0]['permission_types'].value()),
set(['add', 'edit'])
{'add', 'edit'}
)
self.assertEqual(
page_permissions_formset.forms[1]['page'].value(),