add readme and fix conflict

pull/80/head
meomancer 2020-01-14 15:21:21 +07:00
commit 72aeddc43b
18 zmienionych plików z 911 dodań i 782 usunięć

Wyświetl plik

@ -16,7 +16,6 @@ status:
@echo "------------------------------------------------------------------"
@docker-compose -f $(COMPOSE_FILE) -p $(PROJECT_ID) ps
build:
@echo
@echo "------------------------------------------------------------------"
@ -81,24 +80,6 @@ live_logs:
@docker-compose -f $(COMPOSE_FILE) -p $(PROJECT_ID) logs -f
###
# CLIPPING
###
import_clip:
@echo
@echo "------------------------------------------------------------------"
@echo "Importing clip shapefile into the database"
@echo "------------------------------------------------------------------"
@docker exec -t -i $(PROJECT_ID)_imposm /usr/bin/ogr2ogr -progress -skipfailures -lco GEOMETRY_NAME=geom -nlt PROMOTE_TO_MULTI -f PostgreSQL PG:"host=db user=docker password=docker dbname=gis" /home/settings/clip/clip.shp
remove_clip:
@echo
@echo "------------------------------------------------------------------"
@echo "Removing clip shapefile from the database"
@echo "------------------------------------------------------------------"
@docker exec -t -i $(PROJECT_ID)_db /bin/su - postgres -c "psql gis -c 'DROP TABLE IF EXISTS clip;'"
###
# STATS
@ -112,12 +93,13 @@ timestamp:
@echo "------------------------------------------------------------------"
@docker exec -t -i $(PROJECT_ID)_imposm cat /home/settings/timestamp.txt
###
# STYLES
###
import_styles: remove_styles
import_styles: import_styles
@echo
@echo "------------------------------------------------------------------"
@echo "Importing QGIS styles"

Wyświetl plik

@ -6,15 +6,37 @@ version: '2.1'
services:
qgisserver:
image: kartoza/qgis-server:2.18
image: camptocamp/qgis-server:3.6
hostname: dockerosm_qgisserver
container_name: dockerosm_qgisserver
environment:
- QGIS_PROJECT_FILE=/project/project.qgs
- GIS_SERVER_LOG_LEVEL=DEBUG
- MAX_REQUESTS_PER_PROCESS=100
volumes:
- ./logs:/var/log/apache2
- ./web:/project
- ./settings:/web/settings
depends_on:
db:
condition: service_healthy
links:
- db:db
ports:
- 8198:80
restart: unless-stopped
restart: on-failure
# Server vector tiles from PostgreSQL DB
martin:
image: urbica/martin
hostname: dockerosm_martin
container_name: dockerosm_martin
restart: on-failure
ports:
- 3000:3000
environment:
- WATCH_MODE=true
- DATABASE_URL=postgres://docker:docker@db/gis
depends_on:
db:
condition: service_healthy

Wyświetl plik

@ -5,11 +5,13 @@ volumes:
import_done:
import_queue:
cache:
osm_settings:
pgadmin_data:
services:
db:
# About the postgresql version, it should match in the dockerfile of docker-imposm3
image: kartoza/postgis:9.6-2.4
image: kartoza/postgis:12.0
hostname: db
container_name: dockerosm_db
environment:
@ -19,20 +21,35 @@ services:
# Uncomment to expose the postgis database on the network
# - ALLOW_IP_RANGE= 0.0.0.0/0
volumes:
- 'osm-postgis-data:/var/lib/postgresql'
- osm-postgis-data:/var/lib/postgresql
# Uncomment to use the postgis database from outside the docker network
# ports:
# - "35432:5432"
healthcheck:
test: "exit 0"
osm_downloader:
image: kartoza/docker-osm:pbf-downloader
build: docker-osm-pbf
container_name: dockerosm_pbf_download
volumes:
# These are sharable to other containers
- osm_settings:/home/settings
environment:
# Read the README in docker-osm-pbf
- CONTINENT=africa
- COUNTRY=south-africa
- BASE_URL=http://download.geofabrik.de
- MAPPING_URL=https://raw.githubusercontent.com/kartoza/docker-osm/develop/settings
- GEOJSON_URL=''
imposm:
image: kartoza/docker-osm:imposm-latest
build: docker-imposm3
container_name: dockerosm_imposm
volumes:
# These are sharable to other containers
- ./settings:/home/settings
- osm_settings:/home/settings
- import_done:/home/import_done
- import_queue:/home/import_queue
- cache:/home/cache
@ -68,8 +85,9 @@ services:
- DBSCHEMA_BACKUP=backup
# Install some styles if you are using the default mapping. It can be 'yes' or 'no'
- QGIS_STYLE=yes
# Use clip in the database
# Use clip in the database - To use this you should have run make import_clip to add your clip to the DB
- CLIP=no
command: bash -c "while [ ! -f /home/settings/country.pbf ] ; do sleep 1; done && python3 -u /home/importer.py"
osmupdate:
build: docker-osmupdate
@ -77,7 +95,7 @@ services:
container_name: dockerosm_osmupdate
volumes:
# These are sharable to other containers
- ./settings:/home/settings
- osm_settings:/home/settings
- import_done:/home/import_done
- import_queue:/home/import_queue
- cache:/home/cache
@ -104,6 +122,22 @@ services:
# seconds between 2 executions of the script
# if 0, then no update will be done, only the first initial import from the PBF
- TIME=120
command: bash -c "while [ ! -f /home/settings/country.pbf ] ; do sleep 1; done && python3 -u /home/download.py"
pgadmin4:
image: dpage/pgadmin4:4.16
hostname: pgadmin4
volumes:
- pgadmin_data:/var/lib/pgadmin
environment:
- PGADMIN_DEFAULT_EMAIL=docker@gmail.com
- PGADMIN_DEFAULT_PASSWORD=docker
ports:
- 6500:80
restart: on-failure
depends_on:
db:
condition: service_healthy
osmenrich:
build: docker-osmenrich

Wyświetl plik

@ -1,9 +1,12 @@
FROM golang:1.10
MAINTAINER Etienne Trimaille <etienne.trimaille@gmail.com>
RUN apt-get update
RUN wget -q https://www.postgresql.org/media/keys/ACCC4CF8.asc -O - | apt-key add -
RUN sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt/ stretch-pgdg main" >> /etc/apt/sources.list.d/pgdg.list'
RUN apt update && apt install -y python3-pip \
libprotobuf-dev libleveldb-dev libgeos-dev \
libpq-dev python3-dev postgresql-client-9.6 python-setuptools \
libpq-dev python3-dev postgresql-client-11 python-setuptools \
gdal-bin \
--no-install-recommends

Wyświetl plik

@ -54,10 +54,8 @@ class Importer(object):
self.osm_file = None
self.mapping_file = None
self.post_import_file = None
self.clip_shape_file = None
self.clip_sql_file = None
self.clip_json_file = None
self.qgis_style = None
self.cursor = None
self.postgis_uri = None
@ -88,20 +86,18 @@ class Importer(object):
self.error(msg)
else:
self.info('Detect SRID: ' + self.default['SRID'])
# Check valid CLIP.
if self.default['CLIP'] not in ['yes', 'no']:
msg = 'CLIP not supported : %s' % self.default['CLIP']
self.error(msg)
else:
self.info('Clip: ' + self.default['CLIP'])
# Check valid QGIS_STYLE.
if self.default['QGIS_STYLE'] not in ['yes', 'no']:
msg = 'QGIS_STYLE not supported : %s' % self.default['QGIS_STYLE']
self.error(msg)
else:
self.info('Qgis style: ' + self.default['QGIS_STYLE'])
self.info('QGIS style: ' + self.default['QGIS_STYLE'])
# Check folders.
folders = ['IMPORT_QUEUE', 'IMPORT_DONE', 'SETTINGS', 'CACHE']
@ -131,17 +127,12 @@ class Importer(object):
if f == 'post-pbf-import.sql':
self.post_import_file = join(self.default['SETTINGS'], f)
if f == 'clip.geojson':
self.clip_json_file = join(self.default['SETTINGS'], f)
if f == 'qgis_style.sql':
self.qgis_style = join(self.default['SETTINGS'], f)
if f == 'clip':
clip_folder = join(self.default['SETTINGS'], f)
for clip_file in listdir(clip_folder):
if clip_file == 'clip.shp':
self.clip_shape_file = join(clip_folder, clip_file)
if clip_file == 'clip.sql':
self.clip_sql_file = join(clip_folder, clip_file)
if not self.osm_file:
msg = 'OSM file *.pbf is missing in %s' % self.default['SETTINGS']
self.error(msg)
@ -155,9 +146,13 @@ class Importer(object):
self.info('Mapping: ' + self.osm_file)
if not self.post_import_file:
self.info('No custom SQL files *.sql detected in %s' % self.default['SETTINGS'])
self.info('No custom SQL files post-pbf-import.sql detected in %s' % self.default['SETTINGS'])
else:
self.info('SQL Post Import: ' + self.post_import_file)
if not self.clip_json_file:
self.info('No json files to limit import detected in %s' % self.default['SETTINGS'])
else:
self.info('Geojson Initial Import Clip: ' + self.clip_json_file)
if not self.qgis_style and self.default['QGIS_STYLE'] == 'yes':
msg = 'qgis_style.sql is missing in %s and QGIS_STYLE = yes.' % self.default['SETTINGS']
@ -167,14 +162,13 @@ class Importer(object):
else:
self.info('Not using QGIS default styles.')
if not self.clip_shape_file and self.default['CLIP'] == 'yes':
msg = 'clip.shp is missing and CLIP = yes.'
if not self.clip_json_file and self.default['CLIP'] == 'yes':
msg = 'clip.geojson is missing and CLIP = yes.'
self.error(msg)
elif self.clip_shape_file and self.default['QGIS_STYLE']:
self.info('Shapefile for clipping: ' + self.clip_shape_file)
self.info('SQL Clipping function: ' + self.clip_sql_file)
elif self.clip_json_file and self.default['QGIS_STYLE']:
self.info('Geojson for clipping: ' + self.clip_json_file)
else:
self.info('No *.shp detected in %s, so no clipping.' % self.default['SETTINGS'])
self.info('No *.geojson detected, so no clipping.')
# In docker-compose, we should wait for the DB is ready.
self.info('The checkup is OK.')
@ -204,9 +198,7 @@ class Importer(object):
self.default['POSTGRES_PASS']))
self.cursor = connection.cursor()
except OperationalError as e:
print(stderr.write(e))
exit()
self.error(e)
self.postgis_uri = 'postgis://%s:%s@%s/%s' % (
self.default['POSTGRES_USER'],
@ -234,56 +226,37 @@ class Importer(object):
command += ['-f', self.qgis_style]
call(command)
def _import_clip_function(self):
"""Create function clean_tables().
The user must import the clip shapefile to the database!
"""
self.info('Import clip SQL function.')
command = ['psql']
command += ['-h', self.default['POSTGRES_HOST']]
command += ['-U', self.default['POSTGRES_USER']]
command += ['-d', self.default['POSTGRES_DBNAME']]
command += ['-f', self.clip_sql_file]
call(command)
self.info('!! Be sure to run \'make import_clip\' to import the shapefile into the DB !!')
def perform_clip_in_db(self):
"""Perform clipping if the clip table is here."""
if self.count_table('clip') == 1:
self.info('Clipping')
command = ['psql']
command += ['-h', self.default['POSTGRES_HOST']]
command += ['-U', self.default['POSTGRES_USER']]
command += ['-d', self.default['POSTGRES_DBNAME']]
command += ['-c', 'SELECT clean_tables();']
call(command)
def count_table(self, name):
"""Check if there is a table starting with name."""
sql = 'select count(*) ' \
'from information_schema.tables ' \
'where table_name like \'%s\';' % name
def locate_table(self, name):
"""Check for tables in the DB table exists in the DB"""
sql = """ SELECT EXISTS (SELECT 1 AS result from information_schema.tables where table_name like 'TEMP_TABLE'); """
self.cursor.execute(sql.replace('TEMP_TABLE', '%s' % name))
# noinspection PyUnboundLocalVariable
self.cursor.execute(sql)
return self.cursor.fetchone()[0]
def run(self):
"""First checker."""
osm_tables = self.count_table('osm_%')
if osm_tables < 1:
osm_tables = self.locate_table('osm_%')
if osm_tables != 1:
# It means that the DB is empty. Let's import the PBF file.
self._first_pbf_import()
if self.clip_json_file:
self._first_pbf_import(['-limitto', self.clip_json_file])
else:
self._first_pbf_import([])
else:
self.info(
'The database is not empty. Let\'s import only diff files.')
if self.default['TIME'] != '0':
self._import_diff()
if self.clip_json_file:
self._import_diff(['-limitto', self.clip_json_file])
else:
self._import_diff([])
else:
self.info('No more update to the database. Leaving.')
def _first_pbf_import(self):
def _first_pbf_import(self, args):
"""Run the first PBF import into the database."""
command = ['imposm', 'import', '-diff', '-deployproduction']
command += ['-overwritecache', '-cachedir', self.default['CACHE']]
@ -297,7 +270,8 @@ class Importer(object):
command += ['-read', self.osm_file]
command += ['-write', '-connection', self.postgis_uri]
self.info('The database is empty. Let\'s import the PBF : %s' % self.osm_file)
self.info(' '.join(command))
self.info(command.extend(args))
if not call(command) == 0:
msg = 'An error occured in imposm with the original file.'
self.error(msg)
@ -311,14 +285,10 @@ class Importer(object):
if self.post_import_file:
self.import_custom_sql()
if self.clip_shape_file:
self._import_clip_function()
self.perform_clip_in_db()
if self.qgis_style:
self.import_qgis_styles()
def _import_diff(self):
def _import_diff(self, args):
# Finally launch the listening process.
while True:
import_queue = sorted(listdir(self.default['IMPORT_QUEUE']))
@ -336,8 +306,7 @@ class Importer(object):
command += ['-connection', self.postgis_uri]
command += [join(self.default['IMPORT_QUEUE'], diff)]
self.info(' '.join(command))
self.info(command.extend(args))
if call(command) == 0:
move(
join(self.default['IMPORT_QUEUE'], diff),
@ -346,11 +315,6 @@ class Importer(object):
# Update the timestamp in the file.
database_timestamp = diff.split('.')[0].split('->-')[1]
self.update_timestamp(database_timestamp)
if self.clip_shape_file:
self.perform_clip_in_db()
self.info('Import diff successful : %s' % diff)
else:
msg = 'An error occured in imposm with a diff.'
self.error(msg)

Wyświetl plik

@ -0,0 +1,14 @@
FROM alpine:latest
RUN apk --no-cache add --update bash curl
ENV BASE_URL='http://download.geofabrik.de'
ENV CONTINENT=''
ENV COUNTRY=''
ENV MAPPING_URL='https://raw.githubusercontent.com/kartoza/docker-osm/develop/settings'
ENV GEOJSON_URL=''
RUN mkdir /home/settings
ADD download.sh /download.sh
ENTRYPOINT ["/bin/bash", "/download.sh"]

Wyświetl plik

@ -0,0 +1,31 @@
# Download Docker OSM Files
This image is used to facilitate downloading of docker-osm files which are required to get the image
running. The image will download OSM PBF file, Mapping file, Clip Geojson and QGIS Style file.
Environment variables
**BASE_URL='http://download.geofabrik.de'**
This is used to download the OSM PBF file. Currently points to Geofabrik
**CONTINENT=''**
Used to specify what continent you need to download pbf from. This is mandatory eg `CONTINENT=africa`
**COUNTRY=''**
Used to specify which country you need to download the pbf from. This is optional if you intend
to only use the continent pbf. Eg `COUNTRY=lesotho`
**MAPPING_URL='https://raw.githubusercontent.com/kartoza/docker-osm/develop/settings'**
This currently points to the docker-osm repository to enable downloading of the mapping file, qgis_style
file. These files are mandatory in the running of docker-osm
**GEOJSON_URL=''**
This points to the geojson file that is used for clipping data in OSM. This can be empty if you do
not intend to use the clip functionality in docker-osm

Wyświetl plik

@ -0,0 +1,48 @@
#!/usr/bin/env bash
CONTINENT_LOCKFILE=/home/settings/.${CONTINENT}_lock
COUNTRY_LOCKFILE=/home/settings/.${COUNTRY}_lock
touch /home/settings/last.state.txt
touch /home/settings/timestamp.txt
# Download OSM Mapping file and Associated data
if [ ! -f /home/settings/mapping.yml ]; then \
wget -c ${MAPPING_URL}/mapping.yml -O /home/settings/mapping.yml
fi
if [ ! -f /home/settings/qgis_style.sql ]; then \
wget -c ${MAPPING_URL}/qgis_style.sql -O /home/settings/qgis_style.sql
fi
if [ ! -f /home/settings/post-pbf-import.sql ]; then \
url=${MAPPING_URL}/post-pbf-import.sql
if curl --output /dev/null --silent --head --fail "${url}"; then
wget -c ${MAPPING_URL}/post-pbf-import.sql -O /home/settings/post-pbf-import.sql
else
echo "URL does not exist: ${url}"
fi
fi
if [[ ! -f /home/settings/clip.geojson && -z ${GEOJSON_URL} ]]; then \
echo "We are not downloading any Geojson"
else
wget -c ${GEOJSON_URL} -O /home/settings/clip.geojson
fi
# Download OSM PBF
if [[ ! -f ${CONTINENT_LOCKFILE} && -z ${COUNTRY} ]]; then \
echo "${BASE_URL}/${CONTINENT}-latest.osm.pbf"
wget -c --no-check-certificate ${BASE_URL}/${CONTINENT}-latest.osm.pbf -O /tmp/${CONTINENT}.pbf
mv /tmp/${CONTINENT}.pbf /home/settings/country.pbf
touch ${CONTINENT_LOCKFILE}
elif [[ ! -f ${COUNTRY_LOCKFILE} ]]; then
echo "${BASE_URL}/${CONTINENT}/${COUNTRY}-latest.osm.pbf"
wget -c --no-check-certificate ${BASE_URL}/${CONTINENT}/${COUNTRY}-latest.osm.pbf -O /tmp/${COUNTRY}.pbf
mv /tmp/${COUNTRY}.pbf /home/settings/country.pbf
touch ${COUNTRY_LOCKFILE}
fi

Wyświetl plik

@ -46,6 +46,7 @@ class Enrich(object):
}
latest_diff_file = None
cache_folder = None
out_of_scope_osm_folder = None
def __init__(self):
# Default values which can be overwritten by environment variable.
@ -60,7 +61,8 @@ class Enrich(object):
'OSM_API_URL': 'https://api.openstreetmap.org/api/0.6/',
'IMPORT_DONE': 'import_done',
'CACHE': 'cache',
'MAX_DIFF_FILE_SIZE': 100000000
'MAX_DIFF_FILE_SIZE': 100000000,
'CACHE_MODIFY_CHECK': ''
}
self.mapping_file = None
self.mapping_database_schema = {}
@ -99,8 +101,22 @@ class Enrich(object):
cache_folder = join(cache_folder, 'enrich')
if not exists(cache_folder):
mkdir(cache_folder)
# out_of_scope_osm
out_of_scope_osm_folder = join(
cache_folder, 'out_of_scope_osm')
if not exists(out_of_scope_osm_folder):
mkdir(out_of_scope_osm_folder)
self.out_of_scope_osm_folder = out_of_scope_osm_folder
self.cache_folder = cache_folder
# check using not found cache for modify
if self.default['CACHE_MODIFY_CHECK'].lower() == 'true':
self.default['CACHE_MODIFY_CHECK'] = True
else:
self.default['CACHE_MODIFY_CHECK'] = False
def get_cache_path(self):
return join(self.cache_folder, 'cache')
@ -115,6 +131,36 @@ class Enrich(object):
return cache_file
return None
def is_non_recognized_id(self, osm_type, osm_id):
""" Return if osm id and type is unrecognized id
"""
if not self.default['CACHE_MODIFY_CHECK']:
return False
if self.out_of_scope_osm_folder:
if exists(
join(self.out_of_scope_osm_folder,
'%s-%s' % (osm_type, osm_id))):
return True
return False
def get_or_create_non_recognized_id(self, osm_type, osm_id):
""" Create file as cache for non recognized id
"""
if not self.default['CACHE_MODIFY_CHECK']:
return
if self.out_of_scope_osm_folder:
filename = join(
self.out_of_scope_osm_folder,
'%s-%s' % (osm_type, osm_id))
if not exists(filename):
try:
f = open(filename, 'w+')
f.close()
except IOError:
self.info('%s can\'t be created' % filename)
def check_mapping_file_data(self):
"""Perform converting yaml data into json
that used for checking table on database
@ -275,7 +321,7 @@ class Enrich(object):
for field, value in new_data.items():
try:
value = value.replace('\'', '\'\'')
except TypeError:
except (TypeError, AttributeError):
pass
sets.append('%s=\'%s\'' % (field, value))
connection = self.create_connection()
@ -425,6 +471,11 @@ class Enrich(object):
osm_data, '@id')
for table, table_data in self.mapping_database_schema.items():
if osm_data_type == table_data['osm_type']:
# check if this osm is not found on database
if self.is_non_recognized_id(osm_data_type, osm_id):
continue
connection = self.create_connection()
cursor = connection.cursor()
try:
@ -436,6 +487,9 @@ class Enrich(object):
new_data = self.get_osm_enrich_new_data(osm_data, row)
self.update_enrich_into_database(
table, table_data['osm_id_columnn'], osm_id, new_data)
else:
# if this id is not found add in cache
self.get_or_create_non_recognized_id(osm_data_type, osm_id)
except Exception as e:
self.info('error when processing %s: %s' % (osm_id, e))
connection.close()
@ -452,7 +506,6 @@ class Enrich(object):
if not exists(target_folder):
self.info('Folder %s is not ready yet' % target_folder)
return
for filename in sorted(listdir(target_folder)):
try:
if filename.endswith('.gz'):
@ -493,6 +546,7 @@ class Enrich(object):
cache_file = self.get_cache_path()
f = open(cache_file, 'w')
f.write(next_latest_diff_file)
f.close()
except IOError:
self.info('cache file can\'t be created')

Wyświetl plik

@ -0,0 +1,12 @@
# Docker-osmenrich
Docker osm-enrich is the extension for docker osm to get the changeset of the osm data.
It will get the data from the osm API and also get the updated data from files generated by docker-osmupdate
- data is new (changeset is null): get it from docker osm
- data exists but the recent changeset needs checking: get the data from the file generated by osmupdate and update it in the database
osmenrich will create new fields which are:
- changeset_id
- changeset_timestamp
- changeset_version
- changeset_user

Plik binarny nie jest wyświetlany.

Plik binarny nie jest wyświetlany.

Po

Szerokość:  |  Wysokość:  |  Rozmiar: 260 KiB

Plik binarny nie jest wyświetlany.

Po

Szerokość:  |  Wysokość:  |  Rozmiar: 397 KiB

Wyświetl plik

@ -57,4 +57,4 @@ if url:
f.write(diff)
else:
print('This area is unkown in geofabrik or in our script. Check with the list argument.')
print('This area is unknown in geofabrik or in our script. Check with the list argument.')

Wyświetl plik

@ -2,14 +2,19 @@
A docker compose project to setup an OSM PostGIS database with automatic
updates from OSM periodically.
The only file you need is a PBF file and run the docker compose project.
The only files you need are a PBF file and a geojson (if you intend to restrict the data download to
a smaller extent than the one covered by the PBF); then run the docker compose project.
## General architecture
![Alt text](/docs/architecture.png?raw=true "Optional Title")
## Quick setup
As a quick example, we are going to setup Docker-OSM with default values everywhere:
* Download a PBF file from http://download.geofabrik.de/
* Put the file in the `settings` folder.
* Run the docker-compose file and make sure the environment variables are setup properly for
osm_downloader to download the correct pbf file.
* If you want to connect from your local QGIS Desktop:
* In the file `docker-compose.yml`, uncomment the block:
@ -18,8 +23,9 @@ As a quick example, we are going to setup Docker-OSM with default values everywh
ports:
- "35432:5432"
```
* Do `make run` in the build directory. This will download and execute the docker-osm project. It might be very long depending of your bandwidth and the PBF you are importing.
* In QGIS, add a new PostGIS connexion: `localhost`, database `gis`, port `35432`, `docker` for both username and password.
* Do `make run` in the build directory. This will download and execute the docker-osm project.
It might take very long depending on your bandwidth and the PBF you are importing.
* In QGIS, add a new PostGIS connection: `localhost`, database `gis`, port `35432`, `docker` for both username and password.
* That's it! You have an OSM database, up and running. The update is done every 2 minutes from the main OSM website.
For further reading and customizations, read below.
@ -43,15 +49,11 @@ your existing docker-compose project.
In this example we will set up an OSM database for South Africa that
will pull for updates every 2 minutes.
First get a PBF file from your area and put this file in the 'settings' folder.
Specify a PBF file for your area in the environment variables for `osm_downloader` container.
You can download some PBF files on these URLS for instance :
* http://download.geofabrik.de/
* http://download.openstreetmap.fr/extracts/
```bash
cd settings
wget -c -O country.pbf http://download.openstreetmap.fr/extracts/africa/south_africa.osm.pbf
```
You must put only one PBF file in the settings folder. Only the last one will be read.
@ -63,6 +65,12 @@ website: https://imposm.org/docs/imposm3/latest/mapping.html
The default file in Docker-OSM is coming from
https://raw.githubusercontent.com/omniscale/imposm3/master/example-mapping.yml
**Note** that you can't import OSM metadata such as author, timestamp or version.
This is a limitation from ImpOSM, check the feature request on the [Imposm repository](https://github.com/omniscale/imposm3/issues/58).
Imposm is designed for spatial analysis, not for OSM contribution analysis.
If you need such a feature, you need to use another database schema supporting OSM Metadata.
You can check the [OSM Wiki](https://wiki.openstreetmap.org/wiki/Databases_and_data_access_APIs#Database_Schemas) for "Lossless" schemas.
### Updates
You can configure the time interval in the docker-compose file. By default,
@ -74,14 +82,19 @@ you don't set a clipping area, you will end with data from all over the world.
### Clipping
You can put a shapefile in the clip folder. This shapefile will be
used for clipping every features after the import.
This file has to be named 'clip.shp' and in the CRS you are using in the database (4326 by default).
When the database container is running, import the shapefile in the database using the command :
During the initial import or post update imposm uses the flag `-limitto` which allows
you to define a smaller area that you can work with.
This is always desirable to limit the features being imported into the database rather
than clipping them.
`make import_clip`.
**NB:** Ensure you add a geojson covering the area you intend to clip into the settings folder.
The geojson can be the same extent of the administrative area of your country or it can be a
smaller extent. The CRS of the geojson should always be EPSG:4326.
You can remove the clip file : `make remove_clip`.
**NB:** It is encouraged to simplify the geometry for the `clip.geojson` as
a simplified geometry is easier to process during the import.
Rather use the minimum bounding box for the area you intend to clip your dataset with.
### QGIS Styles
@ -95,9 +108,11 @@ make remove_styles
make backup_styles
```
### SQL Trigger
### SQL Trigger, functions, views...
You can add PostGIS functions, triggers, materialized views in the SQL file.
You can add PostGIS functions, triggers, materialized views in a
SQL file called `post-pbf-import.sql`.
It will be imported automatically in the database.
### Build and run
@ -228,16 +243,24 @@ With -e, you can add some settings to PostGIS:
```bash
- ALLOW_IP_RANGE= 0.0.0.0/0
```
More environment variables for Kartoza/postgis image can be found from https://github.com/kartoza/docker-postgis#environment-variables
# QGIS Server
# QGIS Server and Martin Vector tiles
You can run a QGIS Server front end to the OSM mirroir by using the provided
You can run a QGIS Server front end or martin vector tiles to the OSM mirror by using the provided
docker-compose-web.yml file. For example:
```bash
docker-compose -f docker-compose.yml -f docker-compose-web.yml qgisserver up
```
or
```bash
docker-compose -f docker-compose.yml -f docker-compose-web.yml martin up
```
More information about martin configuration and usage can be found at https://github.com/urbica/martin
# Credits
This application was designed and implemented by:

File diff suppressed because one or more lines are too long

Wyświetl plik

@ -1,19 +0,0 @@
CREATE OR REPLACE FUNCTION clean_tables() RETURNS void AS
$BODY$
DECLARE osm_tables CURSOR FOR
SELECT table_name
FROM information_schema.tables
WHERE table_schema='public'
AND table_type='BASE TABLE'
AND table_name LIKE 'osm_%';
BEGIN
FOR osm_table IN osm_tables LOOP
EXECUTE 'DELETE FROM ' || quote_ident(osm_table.table_name) || ' WHERE osm_id IN (
SELECT DISTINCT osm_id
FROM ' || quote_ident(osm_table.table_name) || '
LEFT JOIN clip ON ST_Intersects(geometry, geom))
;';
END LOOP;
END;
$BODY$
LANGUAGE plpgsql;

Plik diff jest za duży Load Diff