#
# First section - common variable initialization
#
# Ensure that errors don't hide inside pipes
SHELL = /bin/bash
.SHELLFLAGS = -o pipefail -c

# Layers definition and metadata
TILESET_FILE := $(or $(TILESET_FILE),$(shell (. .env; echo $${TILESET_FILE})),openmaptiles.yaml)
# Options to run with docker and docker-compose - ensure the container is destroyed on exit
# Containers run as the current user rather than root (so that created files are not root-owned)
DC_OPTS ?= --rm --user=$(shell id -u):$(shell id -g)
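
# Illustrative only (not an upstream feature note): DC_OPTS uses ?= and can therefore be
# overridden per invocation, e.g. to run the containers as root instead of the current user:
#   DC_OPTS="--rm" make import-data
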
# If set to a non-empty value, will use postgis-preloaded instead of postgis docker image
USE_PRELOADED_IMAGE ?=
# Local port to use with postserve
PPORT ?= 8090
export PPORT
# Local port to use with tileserver
TPORT ?= 8080
export TPORT
STYLE_FILE := build/style/style.json
STYLE_HEADER_FILE := style/style-header.json
# Support newer `docker compose` syntax in addition to `docker-compose`
ifeq (, $(shell which docker-compose))
  DOCKER_COMPOSE_COMMAND := docker compose
  $(info Using docker compose V2 (docker compose))
else
  DOCKER_COMPOSE_COMMAND := docker-compose
  $(info Using docker compose V1 (docker-compose))
endif
# Allow a custom docker-compose project name
DC_PROJECT := $(or $(DC_PROJECT),$(shell (. .env; echo $${DC_PROJECT})))
ifeq ($(DC_PROJECT),)
  DC_PROJECT := $(notdir $(shell pwd))
  DOCKER_COMPOSE := $(DOCKER_COMPOSE_COMMAND)
else
  DOCKER_COMPOSE := $(DOCKER_COMPOSE_COMMAND) --project-name $(DC_PROJECT)
endif
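
# Illustrative only: the project name defaults to the current directory name; a custom
# name (the value below is hypothetical) keeps a second checkout's containers and volumes separate:
#   DC_PROJECT=omt-test make start-db
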
# Make some operations quieter (e.g. inside the test script)
ifeq ($(or $(QUIET),$(shell (. .env; echo $${QUIET}))),)
  QUIET_FLAG :=
else
  QUIET_FLAG := --quiet
endif
# Use `xargs --no-run-if-empty` flag, if supported
XARGS := xargs $(shell xargs --no-run-if-empty </dev/null 2>/dev/null && echo --no-run-if-empty)
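
# Illustrative note (added for clarity): with --no-run-if-empty detected, piping an empty
# list into $(XARGS) is a no-op instead of an error, as in the docker volume cleanup
# performed by the destroy-db target further down.
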
# If running in the test mode, compare files rather than copy them
TEST_MODE ?= no
ifeq ($(TEST_MODE),yes)
  # create images in ./build/devdoc and compare them to ./layers
  GRAPH_PARAMS = ./build/devdoc ./layers
else
  # update graphs in the ./layers dir
  GRAPH_PARAMS = ./layers
endif
# Set OpenMapTiles host
export OMT_HOST := http://$(firstword $(subst :, ,$(subst tcp://,,$(DOCKER_HOST))) localhost)
# This defines an easy $(newline) value to act as a "\n". Make sure to keep exactly two empty lines after newline.
define newline


endef
# Use the old Postgres connection values as a fallback
PGHOST := $(or $(PGHOST),$(shell (. .env; echo $${PGHOST})),$(POSTGRES_HOST),$(shell (. .env; echo $${POSTGRES_HOST})),postgres)
PGPORT := $(or $(PGPORT),$(shell (. .env; echo $${PGPORT})),$(POSTGRES_PORT),$(shell (. .env; echo $${POSTGRES_PORT})),5432)
PGDATABASE := $(or $(PGDATABASE),$(shell (. .env; echo $${PGDATABASE})),$(POSTGRES_DB),$(shell (. .env; echo $${POSTGRES_DB})),postgres)
PGUSER := $(or $(PGUSER),$(shell (. .env; echo $${PGUSER})),$(POSTGRES_USER),$(shell (. .env; echo $${POSTGRES_USER})),postgres)
PGPASSWORD := $(or $(PGPASSWORD),$(shell (. .env; echo $${PGPASSWORD})),$(POSTGRES_PASSWORD),$(shell (. .env; echo $${POSTGRES_PASSWORD})),postgres)
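
# Illustrative only -- the exact values are an assumption, not taken from this file:
# legacy POSTGRES_* entries like these in .env are what the $(or ...) fallbacks above pick up:
#   POSTGRES_HOST=postgres
#   POSTGRES_PORT=5432
#   POSTGRES_DB=openmaptiles
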
#
# Determine area to work on
# If $(area) parameter is not set, and only one *.osm.pbf file is found in ./data, use it as $(area).
# Otherwise, all make targets requiring an area will show an error.
# Note: If no *.osm.pbf files are found, once the user calls "make download area=...",
# they will not need to use the "area=" parameter again because there will be just a single file.
#
# historically we have been using $(area) rather than $(AREA), so make both work
area ?= $(AREA)
# Ensure the $(area) param is set, or try to automatically determine it based on available data files
ifeq ($(area),)
# An $(area) parameter is not set. If only one *.osm.pbf file is found in ./data, use it as $(area).
  data_files := $(shell find data -name '*.osm.pbf' 2>/dev/null)
  ifneq ($(word 2,$(data_files)),)
    define assert_area_is_given
	@echo ""
	@echo "ERROR: The 'area' parameter or environment variable has not been set, and there are several 'area' options:"
	@$(patsubst data/%.osm.pbf,echo "  '%'";,$(data_files))
	@echo ""
	@echo "To specify an area use:"
	@echo "  make $@ area=<area-id>"
	@echo ""
	@exit 1
    endef
  else
    ifeq ($(word 1,$(data_files)),)
      define assert_area_is_given
	@echo ""
	@echo "ERROR: The 'area' parameter (or env var) has not been set, and there are no data/*.osm.pbf files"
	@echo ""
	@echo "To specify an area use:"
	@echo "  make $@ area=<area-id>"
	@echo ""
	@echo "To download an area, use make download area=<area-id>"
	@echo "To list downloadable areas, use make list-geofabrik and/or make list-bbbike"
	@echo ""
	@exit 1
      endef
    else
      # Keep just the name of the data file, without the .osm.pbf extension
      area := $(patsubst data/%.osm.pbf,%,$(data_files))
# Rename area-latest.osm.pbf to area.osm.pbf
# TODO: This if statement could be removed in a few months once everyone is using the file without the `-latest`?
      ifneq ($(area),$(area:-latest=))
        $(shell mv "data/$(area).osm.pbf" "data/$(area:-latest=).osm.pbf")
        area := $(area:-latest=)
        $(warning ATTENTION: File data/$(area)-latest.osm.pbf was renamed to $(area).osm.pbf.)
        AREA_INFO := Detected area=$(area) based on finding a 'data/$(area)-latest.osm.pbf' file - renamed to '$(area).osm.pbf'. Use 'area' parameter or environment variable to override.
      else
        AREA_INFO := Detected area=$(area) based on finding a 'data/$(area).osm.pbf' file. Use 'area' parameter or environment variable to override.
endif
endif
endif
endif

ifneq ($(AREA_INFO),)
  define assert_area_is_given
	@echo "$(AREA_INFO)"
  endef
endif
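
# Illustrative usage of the area detection above (area names are examples only):
#   make download area=albania      # fetches data/albania.osm.pbf
#   make import-osm                 # area auto-detected when data/ holds a single *.osm.pbf
#   make import-osm area=albania    # explicit area when several extracts are present
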
# If set, this file will be downloaded in download-osm and imported in the import-osm targets
PBF_FILE ?= data/$(area).osm.pbf
# For download-osm, allow URL parameter to download file from a given URL. Area param must still be provided.
DOWNLOAD_AREA := $(or $(url),$(area))
# The mbtiles file is placed into the $EXPORT_DIR=/export (mapped to ./data)
MBTILES_FILE := $(or $(MBTILES_FILE),$(shell (. .env; echo $${MBTILES_FILE})),$(area).mbtiles)
MBTILES_LOCAL_FILE = data/$(MBTILES_FILE)
DIFF_MODE := $(or $(DIFF_MODE),$(shell (. .env; echo $${DIFF_MODE})))
ifeq ($(DIFF_MODE),true)
# import-osm implementation requires IMPOSM_CONFIG_FILE to be set to a valid file
# For one-time only imports, the default value is fine.
# For diff mode updates, use the dynamically-generated area-based config file
  export IMPOSM_CONFIG_FILE = data/$(area).repl.json
endif
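
# Illustrative only: enabling diff (replication) mode before downloading, e.g.
#   DIFF_MODE=true make download area=albania
# also produces the replication config data/albania.repl.json (the area name is just an example).
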
# Load the area-specific bbox file generated by download-osm --bbox
AREA_BBOX_FILE ?= data/$(area).bbox
ifneq (,$(wildcard $(AREA_BBOX_FILE)))
  cat := $(if $(filter $(OS),Windows_NT),type,cat)
  BBOX := $(shell $(cat) ${AREA_BBOX_FILE})
  export BBOX
endif
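
# Illustrative only (the file format shown is an assumption): a data/albania.bbox written
# by download-osm would hold a single bounding-box line such as
#   19.0,39.6,21.1,42.8
# which is then exported as BBOX.
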
# Consult .env if needed
MIN_ZOOM := $(or $(MIN_ZOOM),$(shell (. .env; echo $${MIN_ZOOM})),0)
MAX_ZOOM := $(or $(MAX_ZOOM),$(shell (. .env; echo $${MAX_ZOOM})),7)
PPORT := $(or $(PPORT),$(shell (. .env; echo $${PPORT})),8090)
TPORT := $(or $(TPORT),$(shell (. .env; echo $${TPORT})),8080)

define HELP_MESSAGE
==============================================================================
OpenMapTiles  https://github.com/openmaptiles/openmaptiles

Hints for testing areas
make list-geofabrik # list actual geofabrik OSM extracts for download -> <<your-area>>
./quickstart.sh <<your-area>> # example: ./quickstart.sh madagascar

Hints for designers:
make start-maputnik # start Maputnik Editor + dynamic tile server [ see $(OMT_HOST):8088 ]
make stop-maputnik # stop Maputnik Editor + dynamic tile server
make start-postserve # start dynamic tile server [ see $(OMT_HOST):$(PPORT) ]
make stop-postserve # stop dynamic tile server
make start-tileserver # start maptiler/tileserver-gl [ see $(OMT_HOST):$(TPORT) ]
make stop-tileserver # stop maptiler/tileserver-gl

Hints for developers:
make # build source code
make bash # start openmaptiles-tools /bin/bash terminal
make generate-bbox-file # compute bounding box of a data file and store it in a file
make generate-devdoc # generate devdoc including graphs for all layers [./layers/...]
make generate-qa # statistics for a given layer's field
make generate-tiles-pg # generate vector tiles based on .env settings using PostGIS ST_MVT()
make generate-tiles # generate vector tiles based on .env settings using Mapnik (obsolete)
make generate-changed-tiles # Generate tiles changed by import-diff
make test-sql # run unit tests on the OpenMapTiles SQL schema
cat .env # list PG database and MIN_ZOOM and MAX_ZOOM information
cat quickstart.log # transcript of the last ./quickstart.sh run
make help # help about available commands

Hints for downloading & importing data:
make list-geofabrik # list actual geofabrik OSM extracts for download
make list-bbbike # list actual BBBike OSM extracts for download
make download area=albania # download OSM data from any source and create config file
make download-geofabrik area=albania # download OSM data from geofabrik.de and create config file
make download-osmfr area=asia/qatar # download OSM data from openstreetmap.fr and create config file
make download-bbbike area=Amsterdam # download OSM data from bbbike.org and create config file
make import-data # Import data from OpenStreetMapData, Natural Earth and OSM Lake Labels.
make import-osm # Import OSM data with the mapping rules from build/mapping.yaml
make import-diff # Import OSM updates from data/changes.osc.gz
make import-wikidata # Import labels from Wikidata
make import-sql # Import layers (run this after modifying layer SQL)

Hints for database management:
make psql # start PostgreSQL console
make psql-list-tables # list all PostgreSQL tables
make list-views # list PostgreSQL public schema views
make list-tables # list PostgreSQL public schema tables
make vacuum-db # PostgreSQL: VACUUM ANALYZE
make analyze-db # PostgreSQL: ANALYZE
make destroy-db # remove docker containers and PostgreSQL data volume
make start-db # start PostgreSQL, creating it if it doesn't exist
make start-db-preloaded # start PostgreSQL, creating data-prepopulated one if it doesn't exist
make stop-db # stop PostgreSQL database without destroying the data

Hints for Docker management:
make clean-unnecessary-docker # clean unnecessary docker image(s) and container(s)
make refresh-docker-images # refresh openmaptiles docker images from Docker HUB
make remove-docker-images # remove openmaptiles docker images
make list-docker-images # show a list of available docker images
==============================================================================
endef
export HELP_MESSAGE
#
# TARGETS
#
.PHONY: all
all: init-dirs build/openmaptiles.tm2source/data.yml build/mapping.yaml build-sql build-style

.PHONY: help
help:
	@echo "$$HELP_MESSAGE" | less

define win_fs_error
( \
echo "" ; \
echo "ERROR: Windows native filesystem" ; \
echo "" ; \
echo "Please avoid running OpenMapTiles in a Windows filesystem." ; \
echo "See https://github.com/openmaptiles/openmaptiles/issues/1095#issuecomment-817095465" ; \
echo "" ; \
exit 1 ; \
)
endef

.PHONY: init-dirs
init-dirs:
	@mkdir -p build/sql/parallel
	@mkdir -p build/openmaptiles.tm2source
	@mkdir -p build/style
	@mkdir -p data
	@mkdir -p cache
	@ ! ($(DOCKER_COMPOSE) 2>/dev/null run $(DC_OPTS) openmaptiles-tools df --output=fstype /tileset | grep -q 9p) </dev/null || ($(win_fs_error))

build/openmaptiles.tm2source/data.yml: init-dirs
ifeq (,$(wildcard build/openmaptiles.tm2source/data.yml))
	$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools bash -c \
		'generate-tm2source $(TILESET_FILE) > $@'
endif

build/mapping.yaml: init-dirs
ifeq (,$(wildcard build/mapping.yaml))
	$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools bash -c \
		'generate-imposm3 $(TILESET_FILE) > $@'
endif

.PHONY: build-sql
build-sql: init-dirs
ifeq (,$(wildcard build/sql/run_last.sql))
	@mkdir -p build/sql/parallel
	$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools bash -c \
		'generate-sql $(TILESET_FILE) --dir ./build/sql \
		&& generate-sqltomvt $(TILESET_FILE) \
			--key --gzip --postgis-ver 3.3.4 \
			--function --fname=getmvt >> ./build/sql/run_last.sql'
endif
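
# Illustrative only (assumes build-sql and import-sql have been run): the generated
# getmvt function can be exercised from a psql session started with "make psql", e.g.
#   SELECT * FROM getmvt(0, 0, 0);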

.PHONY: build-sprite
build-sprite: init-dirs
	$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools bash -c 'spreet /style/icons build/style/sprite && \
		spreet --retina /style/icons build/style/sprite@2x'

.PHONY: build-style
build-style: init-dirs
	$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools bash -c 'style-tools recompose $(TILESET_FILE) $(STYLE_FILE) \
		$(STYLE_HEADER_FILE) && \
		spreet /style/icons build/style/sprite && spreet --retina /style/icons build/style/sprite@2x'
2022-10-06 14:09:21 +00:00
.PHONY : download -fonts
download-fonts :
	$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools bash -c '[ ! -d "/export/fonts" ] && mkdir /export/fonts && \
		echo "Downloading fonts..." && wget -qO /export/noto-sans.zip --show-progress \
		https://github.com/openmaptiles/fonts/releases/download/v2.0/noto-sans.zip && \
		echo "Unzipping fonts..." && unzip -q /export/noto-sans.zip -d /export/fonts && rm /export/noto-sans.zip || \
		echo "Fonts already exist."'

.PHONY: clean
clean: clean-test-data
	rm -rf build

clean-test-data:
	rm -rf data/changes.state.txt
	rm -rf data/last.state.txt
	rm -rf data/changes.repl.json

.PHONY: destroy-db
DOCKER_PROJECT = $(shell echo $(DC_PROJECT) | tr A-Z a-z | tr -cd '[:alnum:]')
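# Illustrative only: the line above lower-cases $(DC_PROJECT) and strips non-alphanumeric
# characters so the volume filter below matches what docker compose actually creates,
# e.g. (hypothetical) DC_PROJECT "OpenMapTiles-2" -> volumes matched by "^openmaptiles2_".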
destroy-db:
	$(DOCKER_COMPOSE) down -v --remove-orphans
	$(DOCKER_COMPOSE) rm -fv
	docker volume ls -q -f "name=^$(DOCKER_PROJECT)_" | $(XARGS) docker volume rm
2020-03-06 18:15:54 +00:00
rm -rf cache
2021-10-13 19:54:19 +00:00
mkdir cache

.PHONY: start-db-nowait
start-db-nowait: init-dirs
	@echo "Starting postgres docker compose target using $${POSTGIS_IMAGE:-default} image (no recreate if exists)" && \
	$(DOCKER_COMPOSE) up --no-recreate -d postgres

.PHONY: start-db
start-db: start-db-nowait
	@echo "Wait for PostgreSQL to start..."
	$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools pgwait
# Wrap start-db target but use the preloaded image
.PHONY: start-db-preloaded
start-db-preloaded: export POSTGIS_IMAGE=openmaptiles/postgis-preloaded
start-db-preloaded: export COMPOSE_HTTP_TIMEOUT=180
start-db-preloaded: start-db

.PHONY: stop-db
stop-db:
	@echo "Stopping PostgreSQL..."
	$(DOCKER_COMPOSE) stop postgres

.PHONY: list-geofabrik
list-geofabrik: init-dirs
	$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools download-osm list geofabrik

.PHONY: list-bbbike
list-bbbike: init-dirs
	$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools download-osm list bbbike
#
# download, download-geofabrik, download-osmfr, and download-bbbike are handled here
# The --imposm-cfg will fail for some of the sources, but we ignore that error -- only needed for diff mode
#
OSM_SERVERS := geofabrik osmfr bbbike
ALL_DOWNLOADS := $(addprefix download-,$(OSM_SERVERS)) download
OSM_SERVER = $(patsubst download,,$(patsubst download-%,%,$@))

.PHONY: $(ALL_DOWNLOADS)
$(ALL_DOWNLOADS): init-dirs
	@$(assert_area_is_given)
ifneq ($(url),)
	$(if $(OSM_SERVER),$(error url parameter can only be used with non-specific download target:$(newline)  make download area=$(area) url="$(url)"$(newline)))
endif
ifeq (,$(wildcard $(PBF_FILE)))
  ifeq ($(DIFF_MODE),true)
	@echo "Downloading $(DOWNLOAD_AREA) with replication support into $(PBF_FILE) and $(IMPOSM_CONFIG_FILE) from $(if $(OSM_SERVER),$(OSM_SERVER),any source)"
	@$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools download-osm $(OSM_SERVER) "$(DOWNLOAD_AREA)" \
		--imposm-cfg "$(IMPOSM_CONFIG_FILE)" \
		--bbox "$(AREA_BBOX_FILE)" \
		--output "$(PBF_FILE)"
  else
	@echo "Downloading $(DOWNLOAD_AREA) into $(PBF_FILE) from $(if $(OSM_SERVER),$(OSM_SERVER),any source)"
	@$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools download-osm $(OSM_SERVER) "$(DOWNLOAD_AREA)" \
		--bbox "$(AREA_BBOX_FILE)" \
		--output "$(PBF_FILE)"
  endif
	@echo ""
else
  ifeq ($(DIFF_MODE),true)
    ifeq (,$(wildcard $(IMPOSM_CONFIG_FILE)))
	$(error \
		$(newline) Data file $(PBF_FILE) already exists, but $(IMPOSM_CONFIG_FILE) does not. \
		$(newline) You probably downloaded the data file before setting DIFF_MODE=true. \
		$(newline) You can delete the data file $(PBF_FILE) and re-run make download \
		$(newline) to re-download and generate the config, or manually create $(IMPOSM_CONFIG_FILE). \
		$(newline) See example https://github.com/openmaptiles/openmaptiles-tools/blob/v5.2/bin/config/repl_config.json \
		$(newline))
    else
	@echo "Data file $(PBF_FILE) and replication config $(IMPOSM_CONFIG_FILE) already exist, skipping the download."
    endif
  else
	@echo "Data file $(PBF_FILE) already exists, skipping the download."
  endif
endif

.PHONY: generate-bbox-file
generate-bbox-file:
	@$(assert_area_is_given)
ifeq (,$(wildcard $(AREA_BBOX_FILE)))
	@$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools download-osm bbox "$(PBF_FILE)" "$(AREA_BBOX_FILE)"
else
	@echo "Configuration file $(AREA_BBOX_FILE) already exists, no need to regenerate. BBOX=$(BBOX)"
endif

.PHONY: psql
psql: start-db-nowait
	$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools sh -c 'pgwait && psql.sh'
# Special cache handling for Docker Toolbox on Windows
ifeq ($(MSYSTEM),MINGW64)
  DC_CONFIG_CACHE := -f docker-compose.yml -f docker-compose-$(MSYSTEM).yml
  DC_OPTS_CACHE := $(filter-out --user=%,$(DC_OPTS))
else
  DC_OPTS_CACHE := $(DC_OPTS)
endif

.PHONY: import-osm
import-osm: all start-db-nowait
	@$(assert_area_is_given)
	$(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) openmaptiles-tools sh -c 'pgwait && import-osm $(PBF_FILE)'
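# Example invocation (the area name is hypothetical; fetch the extract first with
# `make download area=albania`):
#   make import-osm area=albania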

.PHONY: start-update-osm
start-update-osm: start-db
	@$(assert_area_is_given)
	$(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) up -d update-osm

.PHONY: stop-update-osm
stop-update-osm:
	$(DOCKER_COMPOSE) stop update-osm
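# The update-osm docker-compose service started above keeps applying OSM diffs in the
# background until `make stop-update-osm` is run (behaviour inferred from the service name
# and the import-diff target below).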

.PHONY: import-diff
import-diff: start-db-nowait
	@$(assert_area_is_given)
	$(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) openmaptiles-tools sh -c 'pgwait && import-diff'
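# import-diff applies previously downloaded OSM change files to an already imported database;
# see the test-sql target below, which switches DIFF_MODE=true in .env before invoking it.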

.PHONY: import-data
import-data: start-db
	$(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) import-data

.PHONY: import-sql
import-sql: all start-db-nowait
	$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools sh -c 'pgwait && import-sql' | \
		awk -v s=": WARNING:" '1{print; fflush()} $$0~s{print "\n*** WARNING detected, aborting"; exit(1)}' | \
		awk '1{print; fflush()} $$0~".*ERROR" {txt=$$0} END{ if(txt){print "\n*** ERROR detected, aborting:"; print txt; exit(1)} }'
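# The two awk filters above stream the import-sql output unchanged, but abort with a non-zero
# exit code as soon as a ": WARNING:" line appears, or at the end if any line matched "ERROR".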

.PHONY: generate-tiles
generate-tiles: all start-db
	@echo "WARNING: This Mapnik-based method of tile generation is obsolete. Use generate-tiles-pg instead."
@echo " Generating tiles into $( MBTILES_LOCAL_FILE) (will delete if already exists)... "
@rm -rf " $( MBTILES_LOCAL_FILE) "
2020-10-26 16:39:49 +00:00
$( DOCKER_COMPOSE) run $( DC_OPTS) generate-vectortiles
	@echo "Updating generated tile metadata ..."
	$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools \
		mbtiles-tools meta-generate "$(MBTILES_LOCAL_FILE)" $(TILESET_FILE) --auto-minmax --show-ranges

.PHONY: generate-tiles-pg
generate-tiles-pg: all start-db
	@echo "Generating tiles into $(MBTILES_LOCAL_FILE) (will delete if already exists) using PostGIS ST_AsMVT()..."
	@rm -rf "$(MBTILES_LOCAL_FILE)"
# For some reason Ctrl+C doesn't work here without the -T. Must be pressed twice to stop.
	$(DOCKER_COMPOSE) run -T $(DC_OPTS) openmaptiles-tools generate-tiles
	@echo "Updating generated tile metadata ..."
	$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools \
		mbtiles-tools meta-generate "$(MBTILES_LOCAL_FILE)" $(TILESET_FILE) --auto-minmax --show-ranges
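# Example invocation (hypothetical area; BBOX, MIN_ZOOM and MAX_ZOOM appear to come from the
# environment / the area's .env settings, see the debug target below):
#   make generate-tiles-pg area=albania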

.PHONY: data/tiles.txt
data/tiles.txt:
	find ./data -name "*.tiles" -exec cat {} \; -exec rm {} \; | \
		$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools \
		tile_multiplier $(MIN_ZOOM) $(MAX_ZOOM) >> data/tiles.txt
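# tile_multiplier (openmaptiles-tools) expands the collected list of changed tiles across the
# $(MIN_ZOOM)..$(MAX_ZOOM) range -- a description inferred from its usage here.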

.PHONY: generate-changed-tiles
generate-changed-tiles: data/tiles.txt
	# Re-generating updated tiles, if needed
	if [ -s data/tiles.txt ]; then \
		$(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) openmaptiles-tools refresh-views; \
		$(DOCKER_COMPOSE) run $(DC_OPTS) -e LIST_FILE=data/tiles.txt openmaptiles-tools generate-tiles; \
		rm data/tiles.txt; \
	fi

.PHONY: start-tileserver
start-tileserver: init-dirs build-style download-fonts
	@echo " "
	@echo "***********************************************************"
	@echo "* "
	@echo "*   Download/refresh maptiler/tileserver-gl docker image"
	@echo "*   see documentation: https://github.com/maptiler/tileserver-gl"
	@echo "* "
	@echo "***********************************************************"
	@echo " "
	$(DOCKER_COMPOSE_COMMAND) pull tileserver-gl
	@echo " "
	@echo "***********************************************************"
	@echo "* "
	@echo "*   Start maptiler/tileserver-gl "
	@echo "*   ----------------------------> check $(OMT_HOST):$(TPORT)"
	@echo "* "
	@echo "***********************************************************"
	@echo " "
	$(DOCKER_COMPOSE) up -d tileserver-gl

.PHONY: stop-tileserver
stop-tileserver:
	$(DOCKER_COMPOSE) stop tileserver-gl

.PHONY: start-postserve
start-postserve: start-db
	@echo " "
	@echo "***********************************************************"
	@echo "* "
	@echo "*   Bring up postserve at $(OMT_HOST):$(PPORT)"
	@echo "*     --> can view it locally (use make start-maputnik)"
	@echo "*     --> or can use https://maputnik.github.io/editor"
	@echo "* "
	@echo "*   set data source / TileJSON URL to $(OMT_HOST):$(PPORT)"
	@echo "* "
	@echo "***********************************************************"
	@echo " "
	$(DOCKER_COMPOSE) up -d postserve

.PHONY: stop-postserve
stop-postserve:
	$(DOCKER_COMPOSE) stop postserve

.PHONY: start-maputnik
start-maputnik: stop-maputnik start-postserve
	@echo " "
	@echo "***********************************************************"
	@echo "* "
	@echo "*   Start maputnik/editor "
	@echo "*     ---> go to $(OMT_HOST):8088 "
	@echo "*     ---> set data source / TileJSON URL to $(OMT_HOST):$(PPORT)"
	@echo "* "
	@echo "***********************************************************"
	@echo " "
	$(DOCKER_COMPOSE) up -d maputnik_editor

.PHONY: stop-maputnik
stop-maputnik:
	-$(DOCKER_COMPOSE) stop maputnik_editor
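# The leading "-" lets make ignore a failing exit code here, e.g. when the maputnik_editor
# container is not currently running.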

# STAT_FUNCTION=frequency|toplength|variance
.PHONY: generate-qa
generate-qa: all start-db-nowait
	@echo " "
	@echo "e.g. make generate-qa STAT_FUNCTION=frequency LAYER=transportation ATTRIBUTE=class"
	@echo " "
	$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools \
		layer-stats $(STAT_FUNCTION) $(TILESET_FILE) $(LAYER) $(ATTRIBUTE) -m 0 -n 14 -v

# generate all etl and mapping graphs
.PHONY: generate-devdoc
generate-devdoc: init-dirs
	mkdir -p ./build/devdoc && \
		$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools sh -c \
		'generate-etlgraph $(TILESET_FILE) $(GRAPH_PARAMS) && \
		 generate-mapping-graph $(TILESET_FILE) $(GRAPH_PARAMS)'

.PHONY: bash
bash: init-dirs
	$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools bash

.PHONY: import-wikidata
import-wikidata: init-dirs
	$(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) openmaptiles-tools import-wikidata --cache /cache/wikidata-cache.json $(TILESET_FILE)
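# Wikidata labels are cached in /cache/wikidata-cache.json inside the tools container, so
# repeated runs avoid re-fetching; the $(DC_CONFIG_CACHE) override presumably mounts that
# cache directory from the host.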

.PHONY: reset-db-stats
reset-db-stats: init-dirs
	$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools psql.sh -v ON_ERROR_STOP=1 -P pager=off -c 'SELECT pg_stat_statements_reset();'

.PHONY: list-views
list-views: init-dirs
	$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools psql.sh -v ON_ERROR_STOP=1 -A -F"," -P pager=off -P footer=off \
-c "select viewname from pg_views where schemaname='public' order by viewname;"

.PHONY: list-tables
list-tables: init-dirs
	$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools psql.sh -v ON_ERROR_STOP=1 -A -F"," -P pager=off -P footer=off \
-c "select tablename from pg_tables where schemaname='public' order by tablename;"

.PHONY: psql-list-tables
psql-list-tables: init-dirs
	$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools psql.sh -v ON_ERROR_STOP=1 -P pager=off -c "\d+"
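# "\d+" is the psql meta-command that lists all relations together with their sizes and
# descriptions.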

.PHONY: vacuum-db
vacuum-db: init-dirs
	@echo "Start - postgresql: VACUUM ANALYZE VERBOSE;"
	$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools psql.sh -v ON_ERROR_STOP=1 -P pager=off -c 'VACUUM (ANALYZE, VERBOSE);'

.PHONY: analyze-db
analyze-db: init-dirs
	@echo "Start - postgresql: ANALYZE VERBOSE;"
	$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools psql.sh -v ON_ERROR_STOP=1 -P pager=off -c 'ANALYZE VERBOSE;'

.PHONY: list-docker-images
list-docker-images:
	docker images | grep openmaptiles

.PHONY: refresh-docker-images
refresh-docker-images: init-dirs
ifneq ($(NO_REFRESH),)
	@echo "Skipping docker image refresh"
else
	@echo ""
	@echo "Refreshing docker images... Use NO_REFRESH=1 to skip."
ifneq ($(USE_PRELOADED_IMAGE),)
	POSTGIS_IMAGE=openmaptiles/postgis-preloaded \
	$(DOCKER_COMPOSE_COMMAND) pull --ignore-pull-failures $(QUIET_FLAG) openmaptiles-tools generate-vectortiles postgres
else
	$(DOCKER_COMPOSE_COMMAND) pull --ignore-pull-failures $(QUIET_FLAG) openmaptiles-tools generate-vectortiles postgres import-data
endif
endif

.PHONY: remove-docker-images
remove-docker-images:
	@echo "Deleting all openmaptiles related docker image(s)..."
	@$(DOCKER_COMPOSE) down
	@docker images "openmaptiles/*" -q | $(XARGS) docker rmi -f
	@docker images "maputnik/editor" -q | $(XARGS) docker rmi -f
	@docker images "maptiler/tileserver-gl" -q | $(XARGS) docker rmi -f

.PHONY: clean-unnecessary-docker
clean-unnecessary-docker:
	@echo "Deleting unnecessary container(s)..."
	@docker ps -a -q --filter "status=exited" | $(XARGS) docker rm
	@echo "Deleting unnecessary image(s)..."
	@docker images | awk -F" " '/<none>/{print $$3}' | $(XARGS) docker rmi

.PHONY: test-perf-null
test-perf-null: init-dirs
	$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools test-perf $(TILESET_FILE) --test null --no-color

.PHONY: build-test-pbf
build-test-pbf: init-dirs
	$(DOCKER_COMPOSE_COMMAND) run $(DC_OPTS) openmaptiles-tools /tileset/.github/workflows/build-test-data.sh

.PHONY: debug
debug:  ## Use this target when developing Makefile itself to verify loaded environment variables
	@$(assert_area_is_given)
	@echo file_exists = $(wildcard $(AREA_BBOX_FILE))
	@echo AREA_BBOX_FILE = $(AREA_BBOX_FILE) , $$AREA_ENV_FILE
	@echo BBOX = $(BBOX) , $$BBOX
	@echo MIN_ZOOM = $(MIN_ZOOM) , $$MIN_ZOOM
	@echo MAX_ZOOM = $(MAX_ZOOM) , $$MAX_ZOOM
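# Example (hypothetical area name):
#   make debug area=albania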

#
# SQL-based unit tests for the import and update pipeline:
#   * tests/import/*.osm  -- OSM XML test data injected into the database
#   * tests/update/*.osc  -- osmChange XML data applied on top of the imported data
#   * tests/test-post-import.sql and tests/test-post-update.sql -- SQL checks; each failure
#     adds a row to the omt_test_failures table and fails the build
#   Inspect failures with `make psql` and `SELECT * FROM omt_test_failures;`
#   Usage: make clean && make test-sql
#
build/import-tests.osm.pbf: init-dirs
	$(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) openmaptiles-tools sh -c 'osmconvert tests/import/*.osm -o=build/import-tests.osm.pbf'
data/changes.state.txt:
	cp -f tests/changes.state.txt data/
data/last.state.txt:
	cp -f tests/last.state.txt data/
data/changes.repl.json:
	cp -f tests/changes.repl.json data/
data/changes.osc.gz: init-dirs
	@echo "UPDATE unit test data..."
	$(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) openmaptiles-tools sh -c 'osmconvert tests/update/*.osc --merge-versions -o=data/changes.osc && gzip -f data/changes.osc'
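# The data/*.state.txt and data/changes.repl.json fixtures copied above appear to emulate a
# replication state so that import-diff can apply data/changes.osc.gz during test-sql below.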

test-sql: clean refresh-docker-images destroy-db start-db-nowait build/import-tests.osm.pbf data/changes.state.txt data/last.state.txt data/changes.repl.json build/mapping.yaml data/changes.osc.gz build/openmaptiles.tm2source/data.yml build/mapping.yaml build-sql
	$(eval area := changes)
	@echo "Load IMPORT test data"
	sed -ir "s/^[#]*\s*MAX_ZOOM=.*/MAX_ZOOM=14/" .env
	sed -ir "s/^[#]*\s*DIFF_MODE=.*/DIFF_MODE=false/" .env
	$(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) openmaptiles-tools sh -c 'pgwait && import-osm build/import-tests.osm.pbf'
	$(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) import-data
@echo "Apply OpenMapTiles SQL schema to test data @ Zoom 14..."
$( DOCKER_COMPOSE) run $( DC_OPTS) openmaptiles-tools sh -c 'pgwait && import-sql' | \
awk -v s = ": WARNING:" '1{print; fflush()} $$0~s{print "\n*** WARNING detected, aborting"; exit(1)}' | \
awk '1{print; fflush()} $$0~".*ERROR" {txt=$$0} END{ if(txt){print "\n*** ERROR detected, aborting:"; print txt; exit(1)} }'
@echo "Test SQL output for Import Test Data"
	$(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) openmaptiles-tools sh -c 'pgwait && psql.sh < tests/test-post-import.sql' 2>&1 | \
		awk -v s="ERROR:" '1{print; fflush()} $$0~s{print "*** ERROR detected, aborting"; exit(1)}'
@echo "Run UPDATE process on test data..."
sed -ir "s/^[#]*\s*DIFF_MODE=.*/DIFF_MODE=true/" .env
$( DOCKER_COMPOSE) $( DC_CONFIG_CACHE) run $( DC_OPTS_CACHE) openmaptiles-tools sh -c 'pgwait && import-diff'
@echo "Test SQL output for Update Test Data"
	$(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) openmaptiles-tools sh -c 'pgwait && psql.sh < tests/test-post-update.sql' 2>&1 | \
		awk -v s="ERROR:" '1{print; fflush()} $$0~s{print "*** ERROR detected, aborting"; exit(1)}'