From f113974adea9f442b4de7aee26f834a7f316392c Mon Sep 17 00:00:00 2001
From: Tobias McNulty
Date: Fri, 1 Sep 2017 18:59:36 -0400
Subject: [PATCH] add (optional) support for Elasticsearch

---
 bakerydemo/search/views.py        | 34 ++++++++++++-----------
 bakerydemo/settings/production.py | 45 +++++++++++++++++++++++++++++--
 docker-compose.yml                | 10 ++++++-
 requirements/base.txt             |  7 ++---
 4 files changed, 74 insertions(+), 22 deletions(-)

diff --git a/bakerydemo/search/views.py b/bakerydemo/search/views.py
index 6739212..83db25a 100644
--- a/bakerydemo/search/views.py
+++ b/bakerydemo/search/views.py
@@ -1,3 +1,4 @@
+from django.conf import settings
 from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
 from django.shortcuts import render
 
@@ -13,25 +14,26 @@ def search(request):
     # Search
     search_query = request.GET.get('q', None)
     if search_query:
-        """
-        Because we can't use ElasticSearch for the demo, we use the native db search.
-        But native DB search can't search specific fields in our models on a `Page` query.
-        So for demo purposes ONLY, we hard-code in the model names we want to search.
-        In production, use ElasticSearch and a simplified search query, per
-        http://docs.wagtail.io/en/v1.8.1/topics/search/searching.html
-        """
+        if 'elasticsearch' in settings.WAGTAILSEARCH_BACKENDS['default']['BACKEND']:
+            # In production, use Elasticsearch and a simplified search query, per
+            # http://docs.wagtail.io/en/v1.12.1/topics/search/backends.html
+            # like this:
+            search_results = Page.objects.live().search(search_query)
+        else:
+            # If we aren't using Elasticsearch for the demo, fall back to native db search.
+            # But native DB search can't search specific fields in our models on a `Page` query.
+            # So for demo purposes ONLY, we hard-code in the model names we want to search.
+            blog_results = BlogPage.objects.live().search(search_query)
+            blog_page_ids = [p.page_ptr.id for p in blog_results]
 
-        blog_results = BlogPage.objects.live().search(search_query)
-        blog_page_ids = [p.page_ptr.id for p in blog_results]
+            bread_results = BreadPage.objects.live().search(search_query)
+            bread_page_ids = [p.page_ptr.id for p in bread_results]
 
-        bread_results = BreadPage.objects.live().search(search_query)
-        bread_page_ids = [p.page_ptr.id for p in bread_results]
+            location_results = LocationPage.objects.live().search(search_query)
+            location_result_ids = [p.page_ptr.id for p in location_results]
 
-        location_results = LocationPage.objects.live().search(search_query)
-        location_result_ids = [p.page_ptr.id for p in location_results]
-
-        page_ids = blog_page_ids + bread_page_ids + location_result_ids
-        search_results = Page.objects.live().filter(id__in=page_ids)
+            page_ids = blog_page_ids + bread_page_ids + location_result_ids
+            search_results = Page.objects.live().filter(id__in=page_ids)
 
         query = Query.get(search_query)
diff --git a/bakerydemo/settings/production.py b/bakerydemo/settings/production.py
index 0249cbd..6951e3a 100644
--- a/bakerydemo/settings/production.py
+++ b/bakerydemo/settings/production.py
@@ -34,6 +34,49 @@ BASE_URL = 'http://localhost:8000'
 db_from_env = dj_database_url.config(conn_max_age=500)
 DATABASES['default'].update(db_from_env)
 
+# AWS creds may be used for S3 and/or Elasticsearch
+AWS_ACCESS_KEY_ID = os.getenv('AWS_ACCESS_KEY_ID', '')
+AWS_SECRET_ACCESS_KEY = os.getenv('AWS_SECRET_ACCESS_KEY', '')
+AWS_REGION = os.getenv('AWS_REGION', '')
+
+# Configure Elasticsearch, if present in os.environ
+if 'ELASTICSEARCH_ENDPOINT' in os.environ:
+    from elasticsearch import RequestsHttpConnection
+    WAGTAILSEARCH_BACKENDS = {
+        'default': {
+            'BACKEND': 'wagtail.wagtailsearch.backends.elasticsearch2',
+            'HOSTS': [{
+                'host': os.getenv('ELASTICSEARCH_ENDPOINT', ''),
+                'port': os.getenv('ELASTICSEARCH_PORT', '9200'),
+            }],
+            'connection_class': RequestsHttpConnection,
+        }
+    }
+
+    if AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY:
+        from requests_aws4auth import AWS4Auth
+        WAGTAILSEARCH_BACKENDS['default']['http_auth'] = AWS4Auth(
+            AWS_ACCESS_KEY_ID,
+            AWS_SECRET_ACCESS_KEY,
+            AWS_REGION,
+            'es'
+        )
+    elif AWS_REGION:
+        # No API keys in the environ, so attempt to discover them with Boto instead, per:
+        # http://boto3.readthedocs.io/en/latest/guide/configuration.html#configuring-credentials
+        # This may be useful if your credentials are obtained via EC2 instance metadata.
+        from botocore.session import Session
+        from requests_aws4auth import AWS4Auth
+        aws_creds = Session().get_credentials()
+        if aws_creds:
+            WAGTAILSEARCH_BACKENDS['default']['http_auth'] = AWS4Auth(
+                aws_creds.access_key,
+                aws_creds.secret_key,
+                AWS_REGION,
+                'es',
+                aws_creds.token,
+            )
+
 # Simplified static file serving.
 # https://warehouse.python.org/project/whitenoise/
 
@@ -42,8 +85,6 @@ STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
 
 if 'AWS_STORAGE_BUCKET_NAME' in os.environ:
     AWS_STORAGE_BUCKET_NAME = os.getenv('AWS_STORAGE_BUCKET_NAME')
-    AWS_ACCESS_KEY_ID = os.getenv('AWS_ACCESS_KEY_ID', '')
-    AWS_SECRET_ACCESS_KEY = os.getenv('AWS_SECRET_ACCESS_KEY', '')
     AWS_S3_CUSTOM_DOMAIN = '%s.s3.amazonaws.com' % AWS_STORAGE_BUCKET_NAME
     AWS_AUTO_CREATE_BUCKET = True
 
diff --git a/docker-compose.yml b/docker-compose.yml
index a1e5e73..b96f0b4 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -15,19 +15,27 @@ services:
     image: redis:3.0
     expose:
       - "6379"
+  elasticsearch:
+    image: elasticsearch:2.3
+    restart: always
+    expose:
+      - "9200"
   app:
     environment:
       DJANGO_SECRET_KEY: changeme
       DATABASE_URL: postgres://app_user:changeme@db/app_db
-      REDIS_URL: redis://redis
+      CACHE_URL: redis://redis
+      ELASTICSEARCH_ENDPOINT: elasticsearch
     build:
      context: .
      dockerfile: ./Dockerfile
     links:
       - db:db
       - redis:redis
+      - elasticsearch:elasticsearch
     ports:
       - "8000:8000"
     depends_on:
       - db
       - redis
+      - elasticsearch
diff --git a/requirements/base.txt b/requirements/base.txt
index 3234076..0715fb3 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -1,8 +1,9 @@
 Django==1.11.3
 django-dotenv==1.4.1
-# http://docs.wagtail.io/en/v1.8.1/topics/search/backends.html#elasticsearch-backend
-# Not utilized by default; uncomment and review the above document if you require elasticsearch
-# elasticsearch==5.1.0
+# elasticsearch==2.3.0 chosen for compatibility with t2.micro.elasticsearch and t2.small.elasticsearch
+# instance types on AWS. Adjust for your deployment as needed.
+elasticsearch==2.3.0
+requests-aws4auth==0.9
 wagtail==1.12
 wagtailfontawesome==1.0.6
 Pillow==4.0.0
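
A quick way to exercise the new wiring (a sketch for reviewers, not part of the patch): with ELASTICSEARCH_ENDPOINT set and the index built via python manage.py update_index, something like the following can be run from python manage.py shell. The query string 'bread' is only an example.

    # Confirm which search backend is active, then run a Page-level search.
    # Assumes the production settings module is loaded and the index is populated.
    from django.conf import settings
    from wagtail.wagtailcore.models import Page

    print(settings.WAGTAILSEARCH_BACKENDS['default']['BACKEND'])
    # With ELASTICSEARCH_ENDPOINT set, this should print
    # 'wagtail.wagtailsearch.backends.elasticsearch2'.

    for page in Page.objects.live().search('bread'):
        print(page.title)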