Implemented suggestions from @chosak

pull/3794/merge
Karl Hobley 2017-09-29 10:32:47 +01:00 committed by Matt Westcott
parent de3c37639b
commit 0cb63c72c5
4 changed files with 112 additions and 89 deletions

View file

@@ -145,75 +145,18 @@ By default, Wagtail will only purge one URL per page. If your page has more than
        for page_number in range(1, self.get_blog_items().num_pages + 1):
            yield '/?page=' + str(page_number)
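For reference, a complete ``get_cached_paths`` override along these lines might look like the sketch below. The ``BlogIndexPage`` model and its ``get_blog_items()`` paginator helper are assumed from the surrounding example, so treat this as illustrative rather than the exact code elided by this hunk.

.. code-block:: python

    class BlogIndexPage(Page):
        ...

        def get_cached_paths(self):
            # Yield the index page's own URL
            yield '/'

            # Yield one URL per paginated listing page so each one gets purged
            for page_number in range(1, self.get_blog_items().num_pages + 1):
                yield '/?page=' + str(page_number)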
Invalidating index pages
^^^^^^^^^^^^^^^^^^^^^^^^
Another problem is pages that list other pages (such as a blog index) will not
be purged when a blog entry gets added, changed or deleted. You may want to
purge the blog index page so the updates are added into the listing quickly.
Pages that list other pages (such as a blog index) may need to be purged as
well, so that any change to a blog page is also reflected on the index (for
example, when a blog post is added or deleted, or its title/thumbnail changes).
This can be solved by using the ``purge_page_from_cache`` utility function
which can be found in the ``wagtail.contrib.wagtailfrontendcache.utils`` module.
Let's take the above BlogIndexPage as an example. We need to register a
signal handler to run when one of the BlogPages gets updated/deleted. This
signal handler should call the ``purge_page_from_cache`` function on all
BlogIndexPages that contain the BlogPage being updated/deleted.
.. code-block:: python

    # models.py
    from django.dispatch import receiver
    from django.db.models.signals import pre_delete

    from wagtail.wagtailcore.signals import page_published
    from wagtail.contrib.wagtailfrontendcache.utils import purge_page_from_cache

    ...

    def blog_page_changed(blog_page):
        # Find the blog index and purge it. Assuming there is just one on the site.
        blog_index = BlogIndexPage.objects.first()
        purge_page_from_cache(blog_index)


    @receiver(page_published, sender=BlogPage)
    def blog_published_handler(instance, **kwargs):
        blog_page_changed(instance)


    @receiver(pre_delete, sender=BlogPage)
    def blog_deleted_handler(instance, **kwargs):
        blog_page_changed(instance)
Invalidating individual URLs
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
``wagtail.contrib.wagtailfrontendcache.utils`` provides a function called ``purge_url_from_cache``. As the name suggests, this purges an individual URL from the cache.
For example, this could be useful for purging a single page of the blog index:
.. code-block:: python

    from wagtail.contrib.wagtailfrontendcache.utils import purge_url_from_cache

    # Purge the first page of the blog index
    purge_url_from_cache(blog_index.url + '?page=1')
Invalidating many URLs in batches
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. versionadded:: 1.13
If you have many pages/URLs that need to be purged at the same time, it's more
performant to use Wagtail's ``PurgeBatch`` class to build up a list of pages/URLs
to purge, and purge them all in a single request.
To purge these pages, we need to write a signal handler that listens for
Wagtail's ``page_published`` and ``page_unpublished`` signals for blog pages
(note, ``page_published`` is called both when a page is created and updated).
This signal handler would trigger the invalidation of the index page using the
``PurgeBatch`` class, which is used to construct and dispatch invalidation requests.
.. code-block:: python
@@ -224,10 +167,8 @@ to purge, and purge them all in a single request.
    from wagtail.wagtailcore.signals import page_published
    from wagtail.contrib.wagtailfrontendcache.utils import PurgeBatch

    ...

    def blog_page_changed(blog_page):
        # Find all the live BlogIndexPages that contain this blog_page
        batch = PurgeBatch()
@@ -249,7 +190,30 @@ to purge, and purge them all in a single request.
        blog_page_changed(instance)
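Pieced together, the batch-based handler described above might look like the following sketch. The ``BlogIndexPage.objects.live()`` queryset and the ``get_blog_items().object_list`` membership check are assumptions based on the models used earlier on this page; the hunk above elides the middle of the real example.

.. code-block:: python

    from django.dispatch import receiver
    from django.db.models.signals import pre_delete

    from wagtail.wagtailcore.signals import page_published
    from wagtail.contrib.wagtailfrontendcache.utils import PurgeBatch

    def blog_page_changed(blog_page):
        # Find all the live BlogIndexPages that contain this blog_page
        batch = PurgeBatch()
        for blog_index in BlogIndexPage.objects.live():
            if blog_page in blog_index.get_blog_items().object_list:
                batch.add_page(blog_index)

        # Purge all the blog indexes we found in a single request
        batch.purge()


    @receiver(page_published, sender=BlogPage)
    def blog_published_handler(instance, **kwargs):
        blog_page_changed(instance)


    @receiver(pre_delete, sender=BlogPage)
    def blog_deleted_handler(instance, **kwargs):
        blog_page_changed(instance)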
All of the methods available on ``PurgeBatch`` are listed below:
Invalidating URLs
^^^^^^^^^^^^^^^^^
The ``PurgeBatch`` class provides ``.add_url(url)`` and ``.add_urls(urls)``
methods for adding individual URLs to the purge batch.
For example, this could be useful for purging a single page of the blog index:
.. code-block:: python

    from wagtail.contrib.wagtailfrontendcache.utils import PurgeBatch

    # Purge the first page of the blog index
    batch = PurgeBatch()
    batch.add_url(blog_index.url + '?page=1')
    batch.purge()
The ``PurgeBatch`` class
^^^^^^^^^^^^^^^^^^^^^^^^
.. versionadded:: 1.13
All of the methods available on ``PurgeBatch`` are listed below:
.. automodule:: wagtail.contrib.wagtailfrontendcache.utils
.. autoclass:: PurgeBatch
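As a rough usage illustration of the batch API (method names taken from the tests further down in this diff), purging several index pages plus one hand-picked URL might look like this sketch; the ``BlogIndexPage`` queryset and the feed URL are only examples:

.. code-block:: python

    from wagtail.contrib.wagtailfrontendcache.utils import PurgeBatch

    batch = PurgeBatch()

    # Queue every live blog index page for purging
    batch.add_pages(BlogIndexPage.objects.live())

    # Queue an extra URL that isn't tied to a single page
    batch.add_url('https://www.example.com/blog/feed/')

    # Send a single invalidation request covering everything queued above
    batch.purge()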

View file

@@ -97,15 +97,18 @@ class CloudflareBackend(BaseBackend):
if response.status_code != 200:
response.raise_for_status()
else:
logger.error("Couldn't purge from Cloudflare. Unexpected JSON parse error.")
for url in urls:
logger.error("Couldn't purge '%s' from Cloudflare. Unexpected JSON parse error.", url)
except requests.exceptions.HTTPError as e:
logger.error("Couldn't purge from Cloudflare. HTTPError: %d %s", e.response.status_code, e.message)
for url in urls:
logger.error("Couldn't purge '%s' from Cloudflare. HTTPError: %d %s", url, e.response.status_code, e.message)
return
if response_json['success'] is False:
error_messages = ', '.join([str(err['message']) for err in response_json['errors']])
logger.error("Couldn't purge from Cloudflare. Cloudflare errors '%s'", error_messages)
for url in urls:
logger.error("Couldn't purge '%s' from Cloudflare. Cloudflare errors '%s'", url, error_messages)
return
def purge(self, url):
@@ -166,6 +169,11 @@ class CloudfrontBackend(BaseBackend):
}
)
except botocore.exceptions.ClientError as e:
logger.error(
"Couldn't purge from CloudFront. ClientError: %s %s", e.response['Error']['Code'],
e.response['Error']['Message'])
for path in paths:
logger.error(
"Couldn't purge path '%s' from CloudFront (DistributionId=%s). ClientError: %s %s",
path,
distribution_id,
e.response['Error']['Code'],
e.response['Error']['Message']
)

View file

@@ -150,30 +150,28 @@ class TestCachePurgingFunctions(TestCase):

    fixtures = ['test.json']

    def setUp(self):
        # Reset PURGED_URLS to an empty list
        PURGED_URLS[:] = []

    def test_purge_url_from_cache(self):
        PURGED_URLS[:] = [] # reset PURGED_URLS to the empty list
        purge_url_from_cache('http://localhost/foo')
        self.assertEqual(PURGED_URLS, ['http://localhost/foo'])

    def test_purge_urls_from_cache(self):
        PURGED_URLS[:] = [] # reset PURGED_URLS to the empty list
        purge_urls_from_cache(['http://localhost/foo', 'http://localhost/bar'])
        self.assertEqual(PURGED_URLS, ['http://localhost/foo', 'http://localhost/bar'])

    def test_purge_page_from_cache(self):
        PURGED_URLS[:] = [] # reset PURGED_URLS to the empty list
        page = EventIndex.objects.get(url_path='/home/events/')
        purge_page_from_cache(page)
        self.assertEqual(PURGED_URLS, ['http://localhost/events/', 'http://localhost/events/past/'])

    def test_purge_pages_from_cache(self):
        PURGED_URLS[:] = [] # reset PURGED_URLS to the empty list
        purge_pages_from_cache(EventIndex.objects.all())
        self.assertEqual(PURGED_URLS, ['http://localhost/events/', 'http://localhost/events/past/'])

    def test_purge_batch(self):
        PURGED_URLS[:] = [] # reset PURGED_URLS to the empty list
        batch = PurgeBatch()
        page = EventIndex.objects.get(url_path='/home/events/')
        batch.add_page(page)
@@ -192,22 +190,66 @@ class TestCachePurgingSignals(TestCase):

    fixtures = ['test.json']

    def setUp(self):
        # Reset PURGED_URLS to an empty list
        PURGED_URLS[:] = []

    def test_purge_on_publish(self):
        PURGED_URLS[:] = [] # reset PURGED_URLS to the empty list
        page = EventIndex.objects.get(url_path='/home/events/')
        page.save_revision().publish()
        self.assertEqual(PURGED_URLS, ['http://localhost/events/', 'http://localhost/events/past/'])

    def test_purge_on_unpublish(self):
        PURGED_URLS[:] = [] # reset PURGED_URLS to the empty list
        page = EventIndex.objects.get(url_path='/home/events/')
        page.unpublish()
        self.assertEqual(PURGED_URLS, ['http://localhost/events/', 'http://localhost/events/past/'])

    def test_purge_with_unroutable_page(self):
        PURGED_URLS[:] = [] # reset PURGED_URLS to the empty list
        root = Page.objects.get(url_path='/')
        page = EventIndex(title='new top-level page')
        root.add_child(instance=page)
        page.save_revision().publish()
        self.assertEqual(PURGED_URLS, [])


class TestPurgeBatchClass(TestCase):
    # Tests the .add_*() methods on PurgeBatch. The .purge() method is tested
    # by TestCachePurgingFunctions.test_purge_batch above
    fixtures = ['test.json']

    def test_add_url(self):
        batch = PurgeBatch()
        batch.add_url('http://localhost/foo')
        self.assertEqual(batch.urls, ['http://localhost/foo'])

    def test_add_urls(self):
        batch = PurgeBatch()
        batch.add_urls(['http://localhost/foo', 'http://localhost/bar'])
        self.assertEqual(batch.urls, ['http://localhost/foo', 'http://localhost/bar'])

    def test_add_page(self):
        page = EventIndex.objects.get(url_path='/home/events/')
        batch = PurgeBatch()
        batch.add_page(page)
        self.assertEqual(batch.urls, ['http://localhost/events/', 'http://localhost/events/past/'])

    def test_add_pages(self):
        batch = PurgeBatch()
        batch.add_pages(EventIndex.objects.all())
        self.assertEqual(batch.urls, ['http://localhost/events/', 'http://localhost/events/past/'])

    def test_multiple_calls(self):
        page = EventIndex.objects.get(url_path='/home/events/')
        batch = PurgeBatch()
        batch.add_page(page)
        batch.add_url('http://localhost/foo')
        batch.purge()
        self.assertEqual(batch.urls, ['http://localhost/events/', 'http://localhost/events/past/', 'http://localhost/foo'])

View file

@@ -75,15 +75,13 @@ def _get_page_cached_urls(page):
        return []

    return [
        page_url + path[1:]
        page_url + path.lstrip('/')
        for path in page.specific.get_cached_paths()
    ]


def purge_page_from_cache(page, backend_settings=None, backends=None):
    urls = _get_page_cached_urls(page)
    purge_urls_from_cache(urls, backend_settings, backends)
    purge_pages_from_cache([page], backend_settings=backend_settings, backends=backends)
def purge_pages_from_cache(pages, backend_settings=None, backends=None):
@@ -91,7 +89,8 @@ def purge_pages_from_cache(pages, backend_settings=None, backends=None):
    for page in pages:
        urls.extend(_get_page_cached_urls(page))

    purge_urls_from_cache(urls, backend_settings, backends)
    if urls:
        purge_urls_from_cache(urls, backend_settings, backends)
class PurgeBatch(object):
@@ -135,5 +134,15 @@ class PurgeBatch(object):
            self.add_page(page)

    def purge(self, backend_settings=None, backends=None):
        """Performs the purge of all the URLs in this batch"""
        """
        Performs the purge of all the URLs in this batch

        This method takes two optional keyword arguments: backend_settings and backends

        - backend_settings can be used to override the WAGTAILFRONTENDCACHE setting for
          just this call

        - backends can be set to a list of backend names. When set, the invalidation request
          will only be sent to these backends
        """
        purge_urls_from_cache(self.urls, backend_settings, backends)
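As a usage sketch (not part of this diff): the ``backends`` argument keys into the names defined in the ``WAGTAILFRONTENDCACHE`` setting, so with a backend registered under a hypothetical ``'varnish'`` key a selective purge could look like this:

# settings.py -- illustrative configuration; the 'varnish' name is arbitrary
WAGTAILFRONTENDCACHE = {
    'varnish': {
        'BACKEND': 'wagtail.contrib.wagtailfrontendcache.backends.HTTPBackend',
        'LOCATION': 'http://localhost:8000',
    },
}

# elsewhere, e.g. in a signal handler
from wagtail.contrib.wagtailfrontendcache.utils import PurgeBatch

batch = PurgeBatch()
batch.add_url('http://www.example.com/blog/?page=1')

# Send the invalidation request to the 'varnish' backend only
batch.purge(backends=['varnish'])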