Merge branch 'main' into overture-generic

overture-generic
Mike Barry 2023-12-18 07:27:42 -05:00
commit 492490a33e
102 zmienionych plików z 1454 dodań i 958 usunięć

Wyświetl plik

@ -18,10 +18,10 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: true
- name: Set up JDK 17
uses: actions/setup-java@v3
- name: Set up JDK 21
uses: actions/setup-java@v4
with:
java-version: 17
java-version: 21
distribution: 'temurin'
cache: 'maven'
- name: Ensure code formatted with mvn spotless:apply
@ -34,13 +34,14 @@ jobs:
fail-fast: false
matrix:
os: [ ubuntu-latest, macos-latest, windows-latest ]
jdk: [ 17 ]
jdk: [ 21 ]
include:
- os: ubuntu-latest
jdk: 17
jdk: 21
args: "-DargLine='-Duser.language=fr -Duser.country=FR'"
- os: ubuntu-latest
jdk: 20
jdk: 21
args: ""
runs-on: ${{ matrix.os }}
timeout-minutes: 15
steps:
@ -48,7 +49,7 @@ jobs:
with:
submodules: true
- name: Set up JDK ${{ matrix.jdk }}
uses: actions/setup-java@v3
uses: actions/setup-java@v4
with:
java-version: ${{ matrix.jdk }}
distribution: 'temurin'
@ -71,10 +72,10 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: true
- name: Set up JDK 17
uses: actions/setup-java@v3
- name: Set up JDK 21
uses: actions/setup-java@v4
with:
java-version: 17
java-version: 21
distribution: 'temurin'
- name: Build and test
run: mvn --batch-mode -no-transfer-progress package --file standalone.pom.xml
@ -100,9 +101,9 @@ jobs:
- name: Cache data/sources
uses: ./.github/cache-sources-action
- name: Set up JDK
uses: actions/setup-java@v3
uses: actions/setup-java@v4
with:
java-version: 17
java-version: 21
distribution: 'temurin'
cache: 'maven'
@ -110,7 +111,7 @@ jobs:
run: ./mvnw -DskipTests -Dimage.version=CI_ONLY --batch-mode -no-transfer-progress package jib:dockerBuild --file pom.xml
- name: 'Upload artifact'
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: planetiler-build
path: planetiler-dist/target/*with-deps.jar
@ -137,9 +138,9 @@ jobs:
- name: Cache data/sources
uses: ./.github/cache-sources-action
- name: Set up JDK
uses: actions/setup-java@v3
uses: actions/setup-java@v4
with:
java-version: 17
java-version: 21
distribution: 'temurin'
cache: 'maven'

Wyświetl plik

@ -42,12 +42,12 @@ jobs:
with:
basedir: branch
- name: Set up JDK
uses: actions/setup-java@v3
uses: actions/setup-java@v4
with:
java-version: 17
java-version: 21
distribution: 'temurin'
cache: 'maven'
- uses: actions/setup-node@v3
- uses: actions/setup-node@v4
with:
node-version: '14'
- run: npm install -g strip-ansi-cli@3.0.2
@ -88,7 +88,7 @@ jobs:
cat log | strip-ansi > build-info/baselogs.txt
- name: 'Upload build-info'
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: build-info
path: ./build-info

Wyświetl plik

@ -18,65 +18,65 @@ jobs:
contents: write
packages: write
steps:
- name: Ensure version does not start with 'v'
uses: actions/github-script@v6
with:
github-token: ${{ github.token }}
script: |
version = context.payload.inputs.version;
if (/^v/.test(version)) throw new Error("Bad version number: " + version)
- uses: actions/checkout@v4
with:
submodules: true
- name: Cache data/sources
uses: ./.github/cache-sources-action
- uses: actions/setup-java@v3
with:
java-version: '17'
distribution: 'temurin'
cache: 'maven'
server-id: ossrh
server-username: MAVEN_USERNAME
server-password: MAVEN_PASSWORD
- name: Ensure version does not start with 'v'
uses: actions/github-script@v7
with:
github-token: ${{ github.token }}
script: |
version = context.payload.inputs.version;
if (/^v/.test(version)) throw new Error("Bad version number: " + version)
- uses: actions/checkout@v4
with:
submodules: true
- name: Cache data/sources
uses: ./.github/cache-sources-action
- uses: actions/setup-java@v4
with:
java-version: '21'
distribution: 'temurin'
cache: 'maven'
server-id: ossrh
server-username: MAVEN_USERNAME
server-password: MAVEN_PASSWORD
- name: Check tag does not exist yet
run: if git rev-list "v${{ github.event.inputs.version }}"; then echo "Tag already exists. Aborting the release process."; exit 1; fi
- name: Check tag does not exist yet
run: if git rev-list "v${{ github.event.inputs.version }}"; then echo "Tag already exists. Aborting the release process."; exit 1; fi
- run: ./scripts/set-versions.sh "${{ github.event.inputs.version }}"
- run: ./scripts/build-release.sh
- run: ./scripts/test-release.sh "${{ github.event.inputs.version }}"
- name: Create tag
uses: actions/github-script@v6
with:
github-token: ${{ github.token }}
script: |
github.rest.git.createRef({
owner: context.repo.owner,
repo: context.repo.repo,
ref: "refs/tags/v${{ github.event.inputs.version }}",
sha: context.sha
})
- run: mv planetiler-dist/target/*with-deps.jar planetiler.jar
- run: sha256sum planetiler.jar > planetiler.jar.sha256
- run: md5sum planetiler.jar > planetiler.jar.md5
- name: Install GPG Private Key
run: |
echo -n "${{ secrets.OSSRH_GPG_SECRET_KEY }}" | base64 --decode | gpg --batch --import
- name: Create Release
uses: softprops/action-gh-release@v1
with:
fail_on_unmatched_files: true
tag_name: v${{ github.event.inputs.version }}
draft: true
files: |
planetiler.jar*
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- run: ./scripts/push-release.sh ${{ github.event.inputs.version }}
env:
GITHUB_ACTOR: ${{ github.actor }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
IMAGE_TAGS: ${{ github.event.inputs.image_tags }}
MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }}
MAVEN_PASSWORD: ${{ secrets.OSSRH_PASSWORD }}
OSSRH_GPG_SECRET_KEY_PASSWORD: ${{ secrets.OSSRH_GPG_SECRET_KEY_PASSWORD }}
- run: ./scripts/set-versions.sh "${{ github.event.inputs.version }}"
- run: ./scripts/build-release.sh
- run: ./scripts/test-release.sh "${{ github.event.inputs.version }}"
- name: Create tag
uses: actions/github-script@v7
with:
github-token: ${{ github.token }}
script: |
github.rest.git.createRef({
owner: context.repo.owner,
repo: context.repo.repo,
ref: "refs/tags/v${{ github.event.inputs.version }}",
sha: context.sha
})
- run: mv planetiler-dist/target/*with-deps.jar planetiler.jar
- run: sha256sum planetiler.jar > planetiler.jar.sha256
- run: md5sum planetiler.jar > planetiler.jar.md5
- name: Install GPG Private Key
run: |
echo -n "${{ secrets.OSSRH_GPG_SECRET_KEY }}" | base64 --decode | gpg --batch --import
- name: Create Release
uses: softprops/action-gh-release@v1
with:
fail_on_unmatched_files: true
tag_name: v${{ github.event.inputs.version }}
draft: true
files: |
planetiler.jar*
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- run: ./scripts/push-release.sh ${{ github.event.inputs.version }}
env:
GITHUB_ACTOR: ${{ github.actor }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
IMAGE_TAGS: ${{ github.event.inputs.image_tags }}
MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }}
MAVEN_PASSWORD: ${{ secrets.OSSRH_PASSWORD }}
OSSRH_GPG_SECRET_KEY_PASSWORD: ${{ secrets.OSSRH_GPG_SECRET_KEY_PASSWORD }}

Wyświetl plik

@ -21,37 +21,37 @@ jobs:
contents: read
packages: write
steps:
- uses: actions/checkout@v4
with:
submodules: true
- name: Cache data/sources
uses: ./.github/cache-sources-action
- name: Set up JDK
uses: actions/setup-java@v3
with:
java-version: 17
distribution: 'temurin'
cache: 'maven'
server-id: ossrh
server-username: MAVEN_USERNAME
server-password: MAVEN_PASSWORD
- run: ./scripts/build-release.sh
- run: ./scripts/test-release.sh
- run: sha256sum planetiler-dist/target/*with-deps.jar
- run: md5sum planetiler-dist/target/*with-deps.jar
- name: 'Upload artifact'
uses: actions/upload-artifact@v3
with:
name: planetiler-build
path: planetiler-dist/target/*with-deps.jar
- name: Install GPG Private Key
run: |
echo -n "${{ secrets.OSSRH_GPG_SECRET_KEY }}" | base64 --decode | gpg --batch --import
- run: ./scripts/push-release.sh
env:
GITHUB_ACTOR: ${{ github.actor }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
IMAGE_TAGS: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.image_tags || 'latest,snapshot' }}
MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }}
MAVEN_PASSWORD: ${{ secrets.OSSRH_PASSWORD }}
OSSRH_GPG_SECRET_KEY_PASSWORD: ${{ secrets.OSSRH_GPG_SECRET_KEY_PASSWORD }}
- uses: actions/checkout@v4
with:
submodules: true
- name: Cache data/sources
uses: ./.github/cache-sources-action
- name: Set up JDK
uses: actions/setup-java@v4
with:
java-version: 21
distribution: 'temurin'
cache: 'maven'
server-id: ossrh
server-username: MAVEN_USERNAME
server-password: MAVEN_PASSWORD
- run: ./scripts/build-release.sh
- run: ./scripts/test-release.sh
- run: sha256sum planetiler-dist/target/*with-deps.jar
- run: md5sum planetiler-dist/target/*with-deps.jar
- name: 'Upload artifact'
uses: actions/upload-artifact@v4
with:
name: planetiler-build
path: planetiler-dist/target/*with-deps.jar
- name: Install GPG Private Key
run: |
echo -n "${{ secrets.OSSRH_GPG_SECRET_KEY }}" | base64 --decode | gpg --batch --import
- run: ./scripts/push-release.sh
env:
GITHUB_ACTOR: ${{ github.actor }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
IMAGE_TAGS: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.image_tags || 'latest,snapshot' }}
MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }}
MAVEN_PASSWORD: ${{ secrets.OSSRH_PASSWORD }}
OSSRH_GPG_SECRET_KEY_PASSWORD: ${{ secrets.OSSRH_GPG_SECRET_KEY_PASSWORD }}

Wyświetl plik

@ -15,66 +15,66 @@ jobs:
runs-on: ubuntu-latest
timeout-minutes: 15
steps:
- uses: actions/checkout@v4
with:
# Disabling shallow clone is recommended for improving relevancy of reporting
fetch-depth: 0
submodules: true
- name: Set up JDK 17
uses: actions/setup-java@v3
with:
java-version: 17
distribution: 'temurin'
cache: 'maven'
- name: Cache SonarCloud packages
uses: actions/cache@v3
with:
path: ~/.sonar/cache
key: ${{ runner.os }}-sonar
restore-keys: ${{ runner.os }}-sonar
- name: Analyze with SonarCloud
run: |
mvn -Dspotless.check.skip -Pcoverage -B verify org.sonarsource.scanner.maven:sonar-maven-plugin:sonar
env:
# Needed to get some information about the pull request, if any
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# Read-only user, use this token to link SonarLint to SonarCloud as well
SONAR_TOKEN: c2cfe8bd7368ced07e84a620b7c2487846e220eb
- name: Wait for SonarCloud API to update...
run: "sleep 10"
- name: Upload annotations on PRs
if: ${{ github.event_name == 'pull_request' }}
uses: actions/github-script@v6
with:
github-token: ${{ github.token }}
script: |
const pr = context.payload.pull_request.number;
const url = `https://sonarcloud.io/api/issues/search?pullRequest=${pr}&s=FILE_LINE&resolved=false&sinceLeakPeriod=true&ps=100&facets=severities%2Ctypes&componentKeys=onthegomap_planetiler&organization=onthegomap&additionalFields=_all`;
console.log("Fetching " + url);
const response = await github.request(url);
console.log("Got " + JSON.stringify(response.data));
response.data.issues.forEach(issue => {
try {
if (issue.severity === 'INFO') return;
const textRange = issue.textRange;
const rule = encodeURIComponent(issue.rule);
const message = [
issue.message,
'',
`rule: ${issue.rule} (https://sonarcloud.io/organizations/onthegomap/rules?open=${rule}&rule_key=${rule})`,
`issue url: https://sonarcloud.io/project/issues?pullRequest=${pr}&open=${encodeURIComponent(issue.key)}&id=onthegomap_planetiler`
].join('\n');
const args = {
title: `${issue.severity} ${issue.type}`,
file: issue.component.replace(/^[^:]*:/, ''),
startLine: textRange.startLine,
endLine: textRange.endLine,
startColumn: textRange.startOffset,
endColumn: textRange.endOffset
};
core.warning(message, args);
console.log(args);
} catch (e) {
core.error(`Unable to parse sonar issue: ${JSON.stringify(issue)}`);
}
});
- uses: actions/checkout@v4
with:
# Disabling shallow clone is recommended for improving relevancy of reporting
fetch-depth: 0
submodules: true
- name: Set up JDK 21
uses: actions/setup-java@v4
with:
java-version: 21
distribution: 'temurin'
cache: 'maven'
- name: Cache SonarCloud packages
uses: actions/cache@v3
with:
path: ~/.sonar/cache
key: ${{ runner.os }}-sonar
restore-keys: ${{ runner.os }}-sonar
- name: Analyze with SonarCloud
run: |
mvn -Dspotless.check.skip -Pcoverage -B verify org.sonarsource.scanner.maven:sonar-maven-plugin:sonar
env:
# Needed to get some information about the pull request, if any
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# Read-only user, use this token to link SonarLint to SonarCloud as well
SONAR_TOKEN: c2cfe8bd7368ced07e84a620b7c2487846e220eb
- name: Wait for SonarCloud API to update...
run: "sleep 10"
- name: Upload annotations on PRs
if: ${{ github.event_name == 'pull_request' }}
uses: actions/github-script@v7
with:
github-token: ${{ github.token }}
script: |
const pr = context.payload.pull_request.number;
const url = `https://sonarcloud.io/api/issues/search?pullRequest=${pr}&s=FILE_LINE&resolved=false&sinceLeakPeriod=true&ps=100&facets=severities%2Ctypes&componentKeys=onthegomap_planetiler&organization=onthegomap&additionalFields=_all`;
console.log("Fetching " + url);
const response = await github.request(url);
console.log("Got " + JSON.stringify(response.data));
response.data.issues.forEach(issue => {
try {
if (issue.severity === 'INFO') return;
const textRange = issue.textRange;
const rule = encodeURIComponent(issue.rule);
const message = [
issue.message,
'',
`rule: ${issue.rule} (https://sonarcloud.io/organizations/onthegomap/rules?open=${rule}&rule_key=${rule})`,
`issue url: https://sonarcloud.io/project/issues?pullRequest=${pr}&open=${encodeURIComponent(issue.key)}&id=onthegomap_planetiler`
].join('\n');
const args = {
title: `${issue.severity} ${issue.type}`,
file: issue.component.replace(/^[^:]*:/, ''),
startLine: textRange.startLine,
endLine: textRange.endLine,
startColumn: textRange.startOffset,
endColumn: textRange.endOffset
};
core.warning(message, args);
console.log(args);
} catch (e) {
core.error(`Unable to parse sonar issue: ${JSON.stringify(issue)}`);
}
});

Wyświetl plik

@ -22,7 +22,7 @@ jobs:
with:
submodules: true
- name: 'Download branch build info'
uses: dawidd6/action-download-artifact@v2
uses: dawidd6/action-download-artifact@v3
with:
workflow: ${{ github.event.workflow_run.workflow_id }}
run_id: ${{ github.event.workflow_run.id }}

Wyświetl plik

@ -11,12 +11,12 @@ Pull requests are welcome! Any pull request should:
To set up your local development environment:
- Fork the repo [setup submodules](README.md#git-submodules)
- Install Java 17 or later. You can download Java manually from [Adoptium](https://adoptium.net/installation.html) or
- Install Java 21 or later. You can download Java manually from [Adoptium](https://adoptium.net/installation.html) or
use:
- [Windows installer](https://adoptium.net/installation.html#windows-msi)
- [macOS installer](https://adoptium.net/installation.html#macos-pkg) (or `brew install --cask temurin`,
or `port install openjdk17-temurin`)
- [Linux installer](https://adoptium.net/installation/linux/) (or `apt-get install openjdk-17-jdk`)
or `port install openjdk21-temurin`)
- [Linux installer](https://adoptium.net/installation/linux/) (or `apt-get install openjdk-21-jdk`)
- Build and run the tests ([mvnw](https://github.com/takari/maven-wrapper) automatically downloads maven the first time
you run it):
- on mac/linux: `./mvnw clean test`
@ -54,7 +54,7 @@ Troubleshooting:
- If any java source files show "Cannot resolve symbol..." errors for Planetiler classes, you might need to
select: `File -> Invalidate Caches... -> Just Restart`.
- If you see a "Project JDK is not defined" error, then choose `Setup SDK` and point IntelliJ at the Java 17 or later
- If you see a "Project JDK is not defined" error, then choose `Setup SDK` and point IntelliJ at the Java 21 or later
installed on your system
### Visual Studio Code

Wyświetl plik

@ -3,9 +3,9 @@
To generate a map of the world using the
built-in [OpenMapTiles profile](https://github.com/openmaptiles/planetiler-openmaptiles), you will need a
machine with
Java 17 or later installed and at least 10x as much disk space and at least 0.5x as much RAM as the `planet.osm.pbf`
Java 21 or later installed and at least 10x as much disk space and at least 0.5x as much RAM as the `planet.osm.pbf`
file you start from. All testing has been done using Digital Ocean droplets with dedicated
vCPUs ([referral link](https://m.do.co/c/a947e99aab25)) and OpenJDK 17 installed through `apt`. Planetiler splits work
vCPUs ([referral link](https://m.do.co/c/a947e99aab25)) and OpenJDK 21 installed through `apt`. Planetiler splits work
among available CPUs so the more you have, the less time it takes.
### 1) Choose the Data Source
@ -84,10 +84,10 @@ To generate the tiles shown on https://onthegomap.github.io/planetiler-demo/ I u
S3 snapshot, then ran Planetiler on a Digital Ocean Memory-Optimized droplet with 16 CPUs, 128GB RAM, and 1.17TB disk
running Ubuntu 21.04 x64 in the nyc3 location.
First, I installed java 17 jre and screen:
First, I installed java 21 jre and screen:
```bash
apt-get update && apt-get install -y openjdk-17-jre-headless screen
apt-get update && apt-get install -y openjdk-21-jre-headless screen
```
Then I added a script `runworld.sh` to run with 100GB of RAM:

Wyświetl plik

@ -33,7 +33,7 @@ the [OpenStreetMap Americana Project](https://github.com/ZeLonewolf/openstreetma
To generate a map of an area using the [OpenMapTiles profile](https://github.com/openmaptiles/planetiler-openmaptiles),
you will need:
- Java 17+ (see [CONTRIBUTING.md](CONTRIBUTING.md)) or [Docker](https://docs.docker.com/get-docker/)
- Java 21+ (see [CONTRIBUTING.md](CONTRIBUTING.md)) or [Docker](https://docs.docker.com/get-docker/)
- at least 1GB of free disk space plus 5-10x the size of the `.osm.pbf` file
- at least 0.5x as much free RAM as the input `.osm.pbf` file size

Wyświetl plik

@ -47,7 +47,7 @@ public class BenchmarkMbtilesRead {
List<TileCoord> randomCoordsToFetchPerRepetition = new LinkedList<>();
do {
try (var db = Mbtiles.newReadOnlyDatabase(mbtilesPaths.get(0))) {
try (var db = Mbtiles.newReadOnlyDatabase(mbtilesPaths.getFirst())) {
try (var statement = db.connection().prepareStatement(SELECT_RANDOM_COORDS)) {
statement.setInt(1, nrTileReads - randomCoordsToFetchPerRepetition.size());
var rs = statement.executeQuery();

Wyświetl plik

@ -16,11 +16,11 @@
</parent>
<properties>
<geotools.version>30.0</geotools.version>
<log4j.version>2.20.0</log4j.version>
<geotools.version>30.1</geotools.version>
<log4j.version>2.22.0</log4j.version>
<prometheus.version>0.16.0</prometheus.version>
<protobuf.version>3.24.4</protobuf.version>
<geopackage.version>6.6.3</geopackage.version>
<protobuf.version>3.25.1</protobuf.version>
<geopackage.version>6.6.4</geopackage.version>
</properties>
<dependencies>
@ -32,7 +32,7 @@
<dependency>
<groupId>org.roaringbitmap</groupId>
<artifactId>RoaringBitmap</artifactId>
<version>1.0.0</version>
<version>1.0.1</version>
</dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
@ -67,7 +67,7 @@
<dependency>
<groupId>org.xerial</groupId>
<artifactId>sqlite-jdbc</artifactId>
<version>3.43.0.0</version>
<version>3.44.1.0</version>
</dependency>
<dependency>
<groupId>org.msgpack</groupId>
@ -142,7 +142,7 @@
<dependency>
<groupId>com.ibm.icu</groupId>
<artifactId>icu4j</artifactId>
<version>73.2</version>
<version>74.2</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>

Wyświetl plik

@ -15,6 +15,7 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import org.locationtech.jts.algorithm.construct.MaximumInscribedCircle;
import org.locationtech.jts.geom.Geometry;
/**
@ -22,16 +23,13 @@ import org.locationtech.jts.geom.Geometry;
* feature.
* <p>
* For example to add a polygon feature for a lake and a center label point with its name:
*
* <pre>
* {@code
* {@snippet :
* featureCollector.polygon("water")
* .setAttr("class", "lake");
* featureCollector.centroid("water_name")
* .setAttr("class", "lake")
* .setAttr("name", element.getString("name"));
* }
* </pre>
*/
public class FeatureCollector implements Iterable<FeatureCollector.Feature> {
@ -177,15 +175,55 @@ public class FeatureCollector implements Iterable<FeatureCollector.Feature> {
}
}
public Feature innermostPoint(String layer) {
/**
* Starts building a new point map feature at the furthest interior point of a polygon from its edge using
* {@link MaximumInscribedCircle} (aka "pole of inaccessibility") of the source feature.
* <p>
* NOTE: This is substantially more expensive to compute than {@link #centroid(String)} or
* {@link #pointOnSurface(String)}, especially for small {@code tolerance} values.
*
* @param layer the output vector tile layer this feature will be written to
* @param tolerance precision for calculating maximum inscribed circle. 0.01 means 1% of the square root of the area.
* Smaller values for a more precise tolerance become very expensive to compute. Values between 5%
* and 10% are a good compromise of performance vs. precision.
* @return a feature that can be configured further.
*/
public Feature innermostPoint(String layer, double tolerance) {
try {
return geometry(layer, source.innermostPoint());
return geometry(layer, source.innermostPoint(tolerance));
} catch (GeometryException e) {
e.log(stats, "feature_innermost_point", "Error getting innermost point for " + source.id() + " layer=" + layer);
e.log(stats, "feature_innermost_point", "Error constructing innermost point for " + source.id());
return new Feature(layer, EMPTY_GEOM, source.id());
}
}
/** Alias for {@link #innermostPoint(String, double)} with a default tolerance of 10%. */
public Feature innermostPoint(String layer) {
return innermostPoint(layer, 0.1);
}
/** Returns the minimum zoom level at which this feature is at least {@code pixelSize} pixels large. */
public int getMinZoomForPixelSize(double pixelSize) {
try {
return GeoUtils.minZoomForPixelSize(source.size(), pixelSize);
} catch (GeometryException e) {
e.log(stats, "min_zoom_for_size_failure", "Error getting min zoom for size from geometry " + source.id());
return config.maxzoom();
}
}
/** Returns the actual pixel size of the source feature at {@code zoom} (length if line, sqrt(area) if polygon). */
public double getPixelSizeAtZoom(int zoom) {
try {
return source.size() * (256 << zoom);
} catch (GeometryException e) {
e.log(stats, "source_feature_pixel_size_at_zoom_failure",
"Error getting source feature pixel size at zoom from geometry " + source.id());
return 0;
}
}
/**
* Creates new feature collector instances for each source feature that we encounter.
*/
@ -244,6 +282,10 @@ public class FeatureCollector implements Iterable<FeatureCollector.Feature> {
this.geom = geom;
this.geometryType = GeometryType.typeOf(geom);
this.id = id;
if (geometryType == GeometryType.POINT) {
minPixelSizeAtMaxZoom = 0;
defaultMinPixelSize = 0;
}
}
/** Returns the original ID of the source feature that this feature came from (i.e. OSM node/way ID). */
@ -273,8 +315,8 @@ public class FeatureCollector implements Iterable<FeatureCollector.Feature> {
/**
* Sets the value by which features are sorted within a layer in the output vector tile. Sort key gets packed into
* {@link FeatureGroup#SORT_KEY_BITS} bits so the range of this is limited to {@code -(2^(bits-1))} to {@code
* (2^(bits-1))-1}.
* {@link FeatureGroup#SORT_KEY_BITS} bits so the range of this is limited to {@code -(2^(bits-1))} to
* {@code (2^(bits-1))-1}.
* <p>
* Circles, lines, and polygons are rendered in the order they appear in each layer, so features that appear later
* (higher sort key) show up on top of features with a lower sort key.
@ -685,6 +727,29 @@ public class FeatureCollector implements Iterable<FeatureCollector.Feature> {
return setAttr(key, ZoomFunction.minZoom(minzoom, value));
}
/**
* Sets the value for {@code key} only at zoom levels where the feature is at least {@code minPixelSize} pixels in
* size.
*/
public Feature setAttrWithMinSize(String key, Object value, double minPixelSize) {
return setAttrWithMinzoom(key, value, getMinZoomForPixelSize(minPixelSize));
}
/**
* Sets the value for {@code key} so that it always shows when {@code zoom_level >= minZoomToShowAlways} but only
* shows when {@code minZoomIfBigEnough <= zoom_level < minZoomToShowAlways} when it is at least
* {@code minPixelSize} pixels in size.
* <p>
* If you need more flexibility, use {@link #getMinZoomForPixelSize(double)} directly, or create a
* {@link ZoomFunction} that calculates {@link #getPixelSizeAtZoom(int)} and applies a custom threshold based on the
* zoom level.
*/
public Feature setAttrWithMinSize(String key, Object value, double minPixelSize, int minZoomIfBigEnough,
int minZoomToShowAlways) {
return setAttrWithMinzoom(key, value,
Math.clamp(getMinZoomForPixelSize(minPixelSize), minZoomIfBigEnough, minZoomToShowAlways));
}
/**
* Inserts all key/value pairs in {@code attrs} into the set of attribute to emit on the output feature at or above
* {@code minzoom}.
@ -720,6 +785,14 @@ public class FeatureCollector implements Iterable<FeatureCollector.Feature> {
return this;
}
/**
* Returns the attribute key that the renderer should use to store the number of points in the simplified geometry
* before slicing it into tiles.
*/
public String getNumPointsAttr() {
return numPointsAttr;
}
/**
* Sets a special attribute key that the renderer will use to store the number of points in the simplified geometry
* before slicing it into tiles.
@ -729,14 +802,6 @@ public class FeatureCollector implements Iterable<FeatureCollector.Feature> {
return this;
}
/**
* Returns the attribute key that the renderer should use to store the number of points in the simplified geometry
* before slicing it into tiles.
*/
public String getNumPointsAttr() {
return numPointsAttr;
}
@Override
public String toString() {
return "Feature{" +
@ -745,5 +810,10 @@ public class FeatureCollector implements Iterable<FeatureCollector.Feature> {
", attrs=" + attrs +
'}';
}
/** Returns the actual pixel size of the source feature at {@code zoom} (length if line, sqrt(area) if polygon). */
public double getSourceFeaturePixelSizeAtZoom(int zoom) {
return getPixelSizeAtZoom(zoom);
}
}
}

Wyświetl plik

@ -28,6 +28,8 @@ import org.locationtech.jts.geom.LineString;
import org.locationtech.jts.geom.LinearRing;
import org.locationtech.jts.geom.Polygon;
import org.locationtech.jts.geom.Polygonal;
import org.locationtech.jts.geom.TopologyException;
import org.locationtech.jts.geom.util.GeometryFixer;
import org.locationtech.jts.index.strtree.STRtree;
import org.locationtech.jts.operation.buffer.BufferOp;
import org.locationtech.jts.operation.buffer.BufferParameters;
@ -124,7 +126,7 @@ public class FeatureMerge {
List<VectorTile.Feature> result = new ArrayList<>(features.size());
var groupedByAttrs = groupByAttrs(features, result, geometryType);
for (List<VectorTile.Feature> groupedFeatures : groupedByAttrs) {
VectorTile.Feature feature1 = groupedFeatures.get(0);
VectorTile.Feature feature1 = groupedFeatures.getFirst();
if (groupedFeatures.size() == 1) {
result.add(feature1);
} else {
@ -158,7 +160,7 @@ public class FeatureMerge {
List<VectorTile.Feature> result = new ArrayList<>(features.size());
var groupedByAttrs = groupByAttrs(features, result, GeometryType.LINE);
for (List<VectorTile.Feature> groupedFeatures : groupedByAttrs) {
VectorTile.Feature feature1 = groupedFeatures.get(0);
VectorTile.Feature feature1 = groupedFeatures.getFirst();
double lengthLimit = lengthLimitCalculator.apply(feature1.attrs());
// as a shortcut, can skip line merging only if:
@ -300,7 +302,7 @@ public class FeatureMerge {
Collection<List<VectorTile.Feature>> groupedByAttrs = groupByAttrs(features, result, GeometryType.POLYGON);
for (List<VectorTile.Feature> groupedFeatures : groupedByAttrs) {
List<Polygon> outPolygons = new ArrayList<>();
VectorTile.Feature feature1 = groupedFeatures.get(0);
VectorTile.Feature feature1 = groupedFeatures.getFirst();
List<Geometry> geometries = new ArrayList<>(groupedFeatures.size());
for (var feature : groupedFeatures) {
try {
@ -322,7 +324,7 @@ public class FeatureMerge {
// spinning for a very long time on very dense tiles.
// TODO use some heuristic to choose bufferUnbuffer vs. bufferUnionUnbuffer based on the number small
// polygons in the group?
merged = bufferUnionUnbuffer(buffer, polygonGroup);
merged = bufferUnionUnbuffer(buffer, polygonGroup, stats);
} else {
merged = buffer(buffer, GeoUtils.createGeometryCollection(polygonGroup));
}
@ -331,7 +333,7 @@ public class FeatureMerge {
}
merged = GeoUtils.snapAndFixPolygon(merged, stats, "merge").reverse();
} else {
merged = polygonGroup.get(0);
merged = polygonGroup.getFirst();
if (!(merged instanceof Polygonal) || merged.getEnvelopeInternal().getArea() < minArea) {
continue;
}
@ -410,7 +412,7 @@ public class FeatureMerge {
* Merges nearby polygons by expanding each individual polygon by {@code buffer}, unioning them, and contracting the
* result.
*/
private static Geometry bufferUnionUnbuffer(double buffer, List<Geometry> polygonGroup) {
static Geometry bufferUnionUnbuffer(double buffer, List<Geometry> polygonGroup, Stats stats) {
/*
* A simpler alternative that might initially appear faster would be:
*
@ -424,11 +426,20 @@ public class FeatureMerge {
* The following approach is slower most of the time, but faster on average because it does
* not choke on dense nearby polygons:
*/
for (int i = 0; i < polygonGroup.size(); i++) {
polygonGroup.set(i, buffer(buffer, polygonGroup.get(i)));
List<Geometry> buffered = new ArrayList<>(polygonGroup.size());
for (Geometry geometry : polygonGroup) {
buffered.add(buffer(buffer, geometry));
}
Geometry merged = GeoUtils.createGeometryCollection(buffered);
try {
merged = union(merged);
} catch (TopologyException e) {
// buffer result is sometimes invalid, which makes union throw so fix
// it and try again (see #700)
stats.dataError("buffer_union_unbuffer_union_failed");
merged = GeometryFixer.fix(merged);
merged = union(merged);
}
Geometry merged = GeoUtils.createGeometryCollection(polygonGroup);
merged = union(merged);
merged = unbuffer(buffer, merged);
return merged;
}
@ -572,5 +583,5 @@ public class FeatureMerge {
return result;
}
private record WithIndex<T> (T feature, int hilbert) {}
private record WithIndex<T>(T feature, int hilbert) {}
}

Wyświetl plik

@ -907,7 +907,7 @@ public class Planetiler {
private void download() {
var timer = stats.startStage("download");
Downloader downloader = Downloader.create(config(), stats());
Downloader downloader = Downloader.create(config());
for (ToDownload toDownload : toDownload) {
if (profile.caresAboutSource(toDownload.id)) {
downloader.add(toDownload.id, toDownload.url, toDownload.path);
@ -920,7 +920,7 @@ public class Planetiler {
private void ensureInputFilesExist() {
for (InputPath inputPath : inputPaths) {
if (profile.caresAboutSource(inputPath.id) && !Files.exists(inputPath.path)) {
throw new IllegalArgumentException(inputPath.path + " does not exist");
throw new IllegalArgumentException(inputPath.path + " does not exist. Run with --download to fetch it");
}
}
}

Wyświetl plik

@ -27,6 +27,7 @@ import com.onthegomap.planetiler.geo.GeometryException;
import com.onthegomap.planetiler.geo.GeometryType;
import com.onthegomap.planetiler.geo.MutableCoordinateSequence;
import com.onthegomap.planetiler.util.Hilbert;
import com.onthegomap.planetiler.util.LayerAttrStats;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@ -80,6 +81,7 @@ public class VectorTile {
private static final int EXTENT = 4096;
private static final double SIZE = 256d;
private final Map<String, Layer> layers = new LinkedHashMap<>();
private LayerAttrStats.Updater.ForZoom layerStatsTracker = LayerAttrStats.Updater.ForZoom.NOOP;
private static int[] getCommands(Geometry input, int scale) {
var encoder = new CommandEncoder(scale);
@ -263,7 +265,7 @@ public class VectorTile {
lineStrings.add(gf.createLineString(coordSeq));
}
if (lineStrings.size() == 1) {
geometry = lineStrings.get(0);
geometry = lineStrings.getFirst();
} else if (lineStrings.size() > 1) {
geometry = gf.createMultiLineString(lineStrings.toArray(new LineString[0]));
}
@ -305,12 +307,12 @@ public class VectorTile {
}
List<Polygon> polygons = new ArrayList<>();
for (List<LinearRing> rings : polygonRings) {
LinearRing shell = rings.get(0);
LinearRing shell = rings.getFirst();
LinearRing[] holes = rings.subList(1, rings.size()).toArray(new LinearRing[rings.size() - 1]);
polygons.add(gf.createPolygon(shell, holes));
}
if (polygons.size() == 1) {
geometry = polygons.get(0);
geometry = polygons.getFirst();
}
if (polygons.size() > 1) {
geometry = gf.createMultiPolygon(GeometryFactory.toPolygonArray(polygons));
@ -376,7 +378,7 @@ public class VectorTile {
for (VectorTileProto.Tile.Feature feature : layer.getFeaturesList()) {
int tagsCount = feature.getTagsCount();
Map<String, Object> attrs = new HashMap<>(tagsCount / 2);
Map<String, Object> attrs = HashMap.newHashMap(tagsCount / 2);
int tagIdx = 0;
while (tagIdx < feature.getTagsCount()) {
String key = keys.get(feature.getTags(tagIdx++));
@ -467,12 +469,12 @@ public class VectorTile {
if (features.isEmpty()) {
return this;
}
Layer layer = layers.get(layerName);
if (layer == null) {
layer = new Layer();
layers.put(layerName, layer);
}
var statsTracker = layerStatsTracker.forLayer(layerName);
for (Feature inFeature : features) {
if (inFeature != null && inFeature.geometry().commands().length > 0) {
@ -481,8 +483,11 @@ public class VectorTile {
for (Map.Entry<String, ?> e : inFeature.attrs().entrySet()) {
// skip attribute without value
if (e.getValue() != null) {
outFeature.tags.add(layer.key(e.getKey()));
outFeature.tags.add(layer.value(e.getValue()));
String key = e.getKey();
Object value = e.getValue();
outFeature.tags.add(layer.key(key));
outFeature.tags.add(layer.value(value));
statsTracker.accept(key, value);
}
}
@ -509,20 +514,14 @@ public class VectorTile {
for (Object value : layer.values()) {
VectorTileProto.Tile.Value.Builder tileValue = VectorTileProto.Tile.Value.newBuilder();
if (value instanceof String stringValue) {
tileValue.setStringValue(stringValue);
} else if (value instanceof Integer intValue) {
tileValue.setSintValue(intValue);
} else if (value instanceof Long longValue) {
tileValue.setSintValue(longValue);
} else if (value instanceof Float floatValue) {
tileValue.setFloatValue(floatValue);
} else if (value instanceof Double doubleValue) {
tileValue.setDoubleValue(doubleValue);
} else if (value instanceof Boolean booleanValue) {
tileValue.setBoolValue(booleanValue);
} else {
tileValue.setStringValue(value.toString());
switch (value) {
case String stringValue -> tileValue.setStringValue(stringValue);
case Integer intValue -> tileValue.setSintValue(intValue);
case Long longValue -> tileValue.setSintValue(longValue);
case Float floatValue -> tileValue.setFloatValue(floatValue);
case Double doubleValue -> tileValue.setDoubleValue(doubleValue);
case Boolean booleanValue -> tileValue.setBoolValue(booleanValue);
case Object other -> tileValue.setStringValue(other.toString());
}
tileLayer.addValues(tileValue.build());
}
@ -600,6 +599,15 @@ public class VectorTile {
return layers.values().stream().allMatch(v -> v.encodedFeatures.isEmpty()) || containsOnlyFillsOrEdges();
}
/**
* Call back to {@code layerStats} as vector tile features are being encoded in
* {@link #addLayerFeatures(String, List)} to track attribute types present on features in each layer, for example to
* emit in tilejson metadata stats.
*/
public void trackLayerStats(LayerAttrStats.Updater.ForZoom layerStats) {
this.layerStatsTracker = layerStats;
}
enum Command {
MOVE_TO(1),
LINE_TO(2),
@ -1072,31 +1080,32 @@ public class VectorTile {
}
void accept(Geometry geometry) {
if (geometry instanceof MultiLineString multiLineString) {
for (int i = 0; i < multiLineString.getNumGeometries(); i++) {
encode(((LineString) multiLineString.getGeometryN(i)).getCoordinateSequence(), false, GeometryType.LINE);
switch (geometry) {
case MultiLineString multiLineString -> {
for (int i = 0; i < multiLineString.getNumGeometries(); i++) {
encode(((LineString) multiLineString.getGeometryN(i)).getCoordinateSequence(), false, GeometryType.LINE);
}
}
} else if (geometry instanceof Polygon polygon) {
LineString exteriorRing = polygon.getExteriorRing();
encode(exteriorRing.getCoordinateSequence(), true, GeometryType.POLYGON);
for (int i = 0; i < polygon.getNumInteriorRing(); i++) {
LineString interiorRing = polygon.getInteriorRingN(i);
encode(interiorRing.getCoordinateSequence(), true, GeometryType.LINE);
case Polygon polygon -> {
LineString exteriorRing = polygon.getExteriorRing();
encode(exteriorRing.getCoordinateSequence(), true, GeometryType.POLYGON);
for (int i = 0; i < polygon.getNumInteriorRing(); i++) {
LineString interiorRing = polygon.getInteriorRingN(i);
encode(interiorRing.getCoordinateSequence(), true, GeometryType.LINE);
}
}
} else if (geometry instanceof MultiPolygon multiPolygon) {
for (int i = 0; i < multiPolygon.getNumGeometries(); i++) {
accept(multiPolygon.getGeometryN(i));
case MultiPolygon multiPolygon -> {
for (int i = 0; i < multiPolygon.getNumGeometries(); i++) {
accept(multiPolygon.getGeometryN(i));
}
}
} else if (geometry instanceof LineString lineString) {
encode(lineString.getCoordinateSequence(), shouldClosePath(geometry), GeometryType.LINE);
} else if (geometry instanceof Point point) {
encode(point.getCoordinateSequence(), false, GeometryType.POINT);
} else if (geometry instanceof Puntal) {
encode(new CoordinateArraySequence(geometry.getCoordinates()), shouldClosePath(geometry),
case LineString lineString ->
encode(lineString.getCoordinateSequence(), shouldClosePath(geometry), GeometryType.LINE);
case Point point -> encode(point.getCoordinateSequence(), false, GeometryType.POINT);
case Puntal ignored -> encode(new CoordinateArraySequence(geometry.getCoordinates()), shouldClosePath(geometry),
geometry instanceof MultiPoint, GeometryType.POINT);
} else {
LOGGER.warn("Unrecognized geometry type: " + geometry.getGeometryType());
case null -> LOGGER.warn("Null geometry type");
default -> LOGGER.warn("Unrecognized geometry type: " + geometry.getGeometryType());
}
}

Wyświetl plik

@ -15,6 +15,7 @@ import com.onthegomap.planetiler.stats.Timer;
import com.onthegomap.planetiler.util.DiskBacked;
import com.onthegomap.planetiler.util.Format;
import com.onthegomap.planetiler.util.Hashing;
import com.onthegomap.planetiler.util.LayerAttrStats;
import com.onthegomap.planetiler.util.TileSizeStats;
import com.onthegomap.planetiler.util.TileWeights;
import com.onthegomap.planetiler.util.TilesetSummaryStatistics;
@ -59,6 +60,7 @@ public class TileArchiveWriter {
private final AtomicReference<TileCoord> lastTileWritten = new AtomicReference<>();
private final TileArchiveMetadata tileArchiveMetadata;
private final TilesetSummaryStatistics tileStats;
private final LayerAttrStats layerAttrStats = new LayerAttrStats();
private TileArchiveWriter(Iterable<FeatureGroup.TileFeatures> inputTiles, WriteableTileArchive archive,
PlanetilerConfig config, TileArchiveMetadata tileArchiveMetadata, Stats stats) {
@ -105,9 +107,7 @@ public class TileArchiveWriter {
readWorker = reader.readWorker();
}
TileArchiveWriter writer =
new TileArchiveWriter(inputTiles, output, config, tileArchiveMetadata.withLayerStats(features.layerStats()
.getTileStats()), stats);
TileArchiveWriter writer = new TileArchiveWriter(inputTiles, output, config, tileArchiveMetadata, stats);
var pipeline = WorkerPipeline.start("archive", stats);
@ -260,6 +260,7 @@ public class TileArchiveWriter {
boolean skipFilled = config.skipFilledTiles();
var tileStatsUpdater = tileStats.threadLocalUpdater();
var layerAttrStatsUpdater = layerAttrStats.handlerForThread();
for (TileBatch batch : prev) {
List<TileEncodingResult> result = new ArrayList<>(batch.size());
FeatureGroup.TileFeatures last = null;
@ -277,7 +278,7 @@ public class TileArchiveWriter {
layerStats = lastLayerStats;
memoizedTiles.inc();
} else {
VectorTile tile = tileFeatures.getVectorTile();
VectorTile tile = tileFeatures.getVectorTile(layerAttrStatsUpdater);
if (skipFilled && (lastIsFill = tile.containsOnlyFills())) {
encoded = null;
layerStats = null;
@ -333,7 +334,7 @@ public class TileArchiveWriter {
var f = NumberFormat.getNumberInstance(Locale.getDefault());
f.setMaximumFractionDigits(5);
archive.initialize(tileArchiveMetadata);
archive.initialize();
var order = archive.tileOrder();
TileCoord lastTile = null;
@ -371,7 +372,7 @@ public class TileArchiveWriter {
LOGGER.info("Finished z{} in {}", currentZ, time.stop());
}
archive.finish(tileArchiveMetadata);
archive.finish(tileArchiveMetadata.withLayerStats(layerAttrStats.getTileStats()));
}
@SuppressWarnings("java:S2629")

Wyświetl plik

@ -30,7 +30,7 @@ public interface WriteableTileArchive extends Closeable {
* Called before any tiles are written into {@link TileWriter}. Implementations of TileArchive should set up any
* required state here.
*/
default void initialize(TileArchiveMetadata metadata) {}
default void initialize() {}
/**
* Implementations should return a object that implements {@link TileWriter} The specific TileWriter returned might

Wyświetl plik

@ -95,7 +95,7 @@ class ArrayLongLongMapMmap implements LongLongMap.ParallelWrites {
int minChunks = 1;
int maxChunks = (int) (MAX_BYTES_TO_USE / chunkSize);
int targetChunks = (int) (ProcessInfo.getMaxMemoryBytes() * 0.5d / chunkSize);
return Math.min(maxChunks, Math.max(minChunks, targetChunks));
return Math.clamp(targetChunks, minChunks, maxChunks);
}
public void init() {

Wyświetl plik

@ -185,7 +185,7 @@ class ExternalMergeSort implements FeatureSort {
.sinkToConsumer("worker", workers, group -> {
try {
readSemaphore.acquire();
var chunk = group.get(0);
var chunk = group.getFirst();
var others = group.stream().skip(1).toList();
var toSort = time(reading, () -> {
// merge all chunks into first one, and remove the others

Wyświetl plik

@ -27,7 +27,6 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Consumer;
import java.util.function.Function;
import javax.annotation.concurrent.NotThreadSafe;
import org.msgpack.core.MessageBufferPacker;
@ -59,7 +58,6 @@ public final class FeatureGroup implements Iterable<FeatureGroup.TileFeatures>,
private final CommonStringEncoder.AsByte commonLayerStrings = new CommonStringEncoder.AsByte();
private final CommonStringEncoder commonValueStrings = new CommonStringEncoder(100_000);
private final Stats stats;
private final LayerAttrStats layerStats = new LayerAttrStats();
private final PlanetilerConfig config;
private volatile boolean prepared = false;
private final TileOrder tileOrder;
@ -141,14 +139,6 @@ public final class FeatureGroup implements Iterable<FeatureGroup.TileFeatures>,
return (byte) ((geometry.geomType().asByte() & 0xff) | (geometry.scale() << 3));
}
/**
* Returns statistics about each layer written through {@link #newRenderedFeatureEncoder()} including min/max zoom,
* features on elements in that layer, and their types.
*/
public LayerAttrStats layerStats() {
return layerStats;
}
public long numFeaturesWritten() {
return sorter.numFeaturesWritten();
}
@ -159,16 +149,13 @@ public final class FeatureGroup implements Iterable<FeatureGroup.TileFeatures>,
// This method gets called billions of times when generating the planet, so these optimizations make a big difference:
// 1) Re-use the same buffer packer to avoid allocating and resizing new byte arrays for every feature.
private final MessageBufferPacker packer = MessagePack.newDefaultBufferPacker();
// 2) Avoid a ThreadLocal lookup on every layer stats call by getting the handler for this thread once
private final Consumer<RenderedFeature> threadLocalLayerStats = layerStats.handlerForThread();
// 3) Avoid re-encoding values for identical filled geometries (i.e. ocean) by memoizing the encoded values
// 2) Avoid re-encoding values for identical filled geometries (i.e. ocean) by memoizing the encoded values
// FeatureRenderer ensures that a separate VectorTileEncoder.Feature is used for each zoom level
private VectorTile.Feature lastFeature = null;
private byte[] lastEncodedValue = null;
@Override
public SortableFeature apply(RenderedFeature feature) {
threadLocalLayerStats.accept(feature);
var group = feature.group().orElse(null);
var thisFeature = feature.vectorTileFeature();
byte[] encodedValue;
@ -217,24 +204,18 @@ public final class FeatureGroup implements Iterable<FeatureGroup.TileFeatures>,
var attrs = vectorTileFeature.attrs();
packer.packMapHeader((int) attrs.values().stream().filter(Objects::nonNull).count());
for (Map.Entry<String, Object> entry : attrs.entrySet()) {
if (entry.getValue() != null) {
Object value = entry.getValue();
if (value != null) {
packer.packInt(commonValueStrings.encode(entry.getKey()));
Object value = entry.getValue();
if (value instanceof String string) {
packer.packValue(ValueFactory.newString(string));
} else if (value instanceof Integer integer) {
packer.packValue(ValueFactory.newInteger(integer.longValue()));
} else if (value instanceof Long longValue) {
packer.packValue(ValueFactory.newInteger(longValue));
} else if (value instanceof Float floatValue) {
packer.packValue(ValueFactory.newFloat(floatValue));
} else if (value instanceof Double doubleValue) {
packer.packValue(ValueFactory.newFloat(doubleValue));
} else if (value instanceof Boolean booleanValue) {
packer.packValue(ValueFactory.newBoolean(booleanValue));
} else {
packer.packValue(ValueFactory.newString(value.toString()));
}
packer.packValue(switch (value) {
case String string -> ValueFactory.newString(string);
case Integer integer -> ValueFactory.newInteger(integer.longValue());
case Long longValue -> ValueFactory.newInteger(longValue);
case Float floatValue -> ValueFactory.newFloat(floatValue);
case Double doubleValue -> ValueFactory.newFloat(doubleValue);
case Boolean booleanValue -> ValueFactory.newBoolean(booleanValue);
case Object other -> ValueFactory.newString(other.toString());
});
}
}
// Use the same binary format for encoding geometries in output vector tiles. Benchmarking showed
@ -423,7 +404,7 @@ public final class FeatureGroup implements Iterable<FeatureGroup.TileFeatures>,
GeometryType geomType = decodeGeomType(geomTypeAndScale);
int scale = decodeScale(geomTypeAndScale);
int mapSize = unpacker.unpackMapHeader();
Map<String, Object> attrs = new HashMap<>(mapSize);
Map<String, Object> attrs = HashMap.newHashMap(mapSize);
for (int i = 0; i < mapSize; i++) {
String key = commonValueStrings.decode(unpacker.unpackInt());
Value v = unpacker.unpackValue();
@ -456,7 +437,14 @@ public final class FeatureGroup implements Iterable<FeatureGroup.TileFeatures>,
}
public VectorTile getVectorTile() {
return getVectorTile(null);
}
public VectorTile getVectorTile(LayerAttrStats.Updater layerStats) {
VectorTile tile = new VectorTile();
if (layerStats != null) {
tile.trackLayerStats(layerStats.forZoom(tileCoord.z()));
}
List<VectorTile.Feature> items = new ArrayList<>(entries.size());
String currentLayer = null;
for (SortableFeature entry : entries) {
@ -494,7 +482,7 @@ public final class FeatureGroup implements Iterable<FeatureGroup.TileFeatures>,
// log failures, only throwing when it's a fatal error
if (e instanceof GeometryException geoe) {
geoe.log(stats, "postprocess_layer",
"Caught error postprocessing features for " + layer + " layer on " + tileCoord);
"Caught error postprocessing features for " + layer + " layer on " + tileCoord, config.logJtsExceptions());
} else if (e instanceof Error err) {
LOGGER.error("Caught fatal error postprocessing features {} {}", layer, tileCoord, e);
throw err;

Wyświetl plik

@ -17,6 +17,7 @@ import org.slf4j.LoggerFactory;
public class Bounds {
private static final Logger LOGGER = LoggerFactory.getLogger(Bounds.class);
public static final Bounds WORLD = new Bounds(null);
private Envelope latLon;
private Envelope world;
@ -24,14 +25,10 @@ public class Bounds {
private Geometry shape;
Bounds(Envelope latLon) {
public Bounds(Envelope latLon) {
set(latLon);
}
public boolean isWorld() {
return latLon == null || latLon.contains(GeoUtils.WORLD_LAT_LON_BOUNDS);
}
public Envelope latLon() {
return latLon == null ? GeoUtils.WORLD_LAT_LON_BOUNDS : latLon;
}
@ -40,6 +37,10 @@ public class Bounds {
return world == null ? GeoUtils.WORLD_BOUNDS : world;
}
public boolean isWorld() {
return latLon == null || latLon.equals(GeoUtils.WORLD_LAT_LON_BOUNDS);
}
public TileExtents tileExtents() {
if (tileExtents == null) {
tileExtents = TileExtents.computeFromWorldBounds(PlanetilerConfig.MAX_MAXZOOM, world(), shape);

Wyświetl plik

@ -58,7 +58,8 @@ public record PlanetilerConfig(
String debugUrlPattern,
Path tmpDir,
Path tileWeights,
double maxPointBuffer
double maxPointBuffer,
boolean logJtsExceptions
) {
public static final int MIN_MINZOOM = 0;
@ -208,7 +209,8 @@ public record PlanetilerConfig(
"Max tile pixels to include points outside tile bounds. Set to a lower value to reduce tile size for " +
"clients that handle label collisions across tiles (most web and native clients). NOTE: Do not reduce if you need to support " +
"raster tile rendering",
Double.POSITIVE_INFINITY)
Double.POSITIVE_INFINITY),
arguments.getBoolean("log_jts_exceptions", "Emit verbose details to debug JTS geometry errors", false)
);
}

Wyświetl plik

@ -34,19 +34,14 @@ public enum DataType implements BiFunction<WithTags, String, Object> {
/** Returns the data type associated with {@code value}, or {@link #GET_TAG} as a fallback. */
public static DataType typeOf(Object value) {
if (value instanceof String) {
return GET_STRING;
} else if (value instanceof Integer) {
return GET_INT;
} else if (value instanceof Long) {
return GET_LONG;
} else if (value instanceof Double) {
return GET_DOUBLE;
} else if (value instanceof Boolean) {
return GET_BOOLEAN;
} else {
return GET_TAG;
}
return switch (value) {
case String ignored -> GET_STRING;
case Integer ignored -> GET_INT;
case Long ignored -> GET_LONG;
case Double ignored -> GET_DOUBLE;
case Boolean ignored -> GET_BOOLEAN;
default -> GET_TAG;
};
}
/** Returns the data type associated with {@code id}, or {@link #GET_TAG} as a fallback. */

Wyświetl plik

@ -23,12 +23,9 @@ import org.slf4j.LoggerFactory;
* <p>
* Calling {@code toString()} on any expression will generate code that can be used to recreate an identical copy of the
* original expression, assuming that the generated code includes:
*
* <pre>
* {@code
* {@snippet :
* import static com.onthegomap.planetiler.expression.Expression.*;
* }
* </pre>
*/
// TODO rename to BooleanExpression
public interface Expression extends Simplifiable<Expression> {
@ -141,14 +138,13 @@ public interface Expression extends Simplifiable<Expression> {
default Expression replace(Predicate<Expression> replace, Expression b) {
if (replace.test(this)) {
return b;
} else if (this instanceof Not not) {
return new Not(not.child.replace(replace, b));
} else if (this instanceof Or or) {
return new Or(or.children.stream().map(child -> child.replace(replace, b)).toList());
} else if (this instanceof And and) {
return new And(and.children.stream().map(child -> child.replace(replace, b)).toList());
} else {
return this;
return switch (this) {
case Not(var child) -> new Not(child.replace(replace, b));
case Or(var children) -> new Or(children.stream().map(child -> child.replace(replace, b)).toList());
case And(var children) -> new And(children.stream().map(child -> child.replace(replace, b)).toList());
default -> this;
};
}
}
@ -156,14 +152,13 @@ public interface Expression extends Simplifiable<Expression> {
default boolean contains(Predicate<Expression> filter) {
if (filter.test(this)) {
return true;
} else if (this instanceof Not not) {
return not.child.contains(filter);
} else if (this instanceof Or or) {
return or.children.stream().anyMatch(child -> child.contains(filter));
} else if (this instanceof And and) {
return and.children.stream().anyMatch(child -> child.contains(filter));
} else {
return false;
return switch (this) {
case Not(var child) -> child.contains(filter);
case Or(var children) -> children.stream().anyMatch(child -> child.contains(filter));
case And(var children) -> children.stream().anyMatch(child -> child.contains(filter));
default -> false;
};
}
}
@ -234,7 +229,7 @@ public interface Expression extends Simplifiable<Expression> {
return TRUE;
}
if (children.size() == 1) {
return children.get(0).simplifyOnce();
return children.getFirst().simplifyOnce();
}
if (children.contains(FALSE)) {
return FALSE;
@ -288,7 +283,7 @@ public interface Expression extends Simplifiable<Expression> {
return FALSE;
}
if (children.size() == 1) {
return children.get(0).simplifyOnce();
return children.getFirst().simplifyOnce();
}
if (children.contains(TRUE)) {
return TRUE;

Wyświetl plik

@ -50,19 +50,15 @@ public record MultiExpression<T> (List<Entry<T>> expressions) implements Simplif
* when a particular key is present on the input.
*/
private static boolean mustAlwaysEvaluate(Expression expression) {
if (expression instanceof Expression.Or or) {
return or.children().stream().anyMatch(MultiExpression::mustAlwaysEvaluate);
} else if (expression instanceof Expression.And and) {
return and.children().stream().allMatch(MultiExpression::mustAlwaysEvaluate);
} else if (expression instanceof Expression.Not not) {
return !mustAlwaysEvaluate(not.child());
} else if (expression instanceof Expression.MatchAny any && any.matchWhenMissing()) {
return true;
} else {
return !(expression instanceof Expression.MatchAny) &&
!(expression instanceof Expression.MatchField) &&
!FALSE.equals(expression);
}
return switch (expression) {
case Expression.Or(var children) -> children.stream().anyMatch(MultiExpression::mustAlwaysEvaluate);
case Expression.And(var children) -> children.stream().allMatch(MultiExpression::mustAlwaysEvaluate);
case Expression.Not(var child) -> !mustAlwaysEvaluate(child);
case Expression.MatchAny any when any.matchWhenMissing() -> true;
case null, default -> !(expression instanceof Expression.MatchAny) &&
!(expression instanceof Expression.MatchField) &&
!FALSE.equals(expression);
};
}
/** Calls {@code acceptKey} for every tag that could possibly cause {@code exp} to match an input element. */
@ -176,7 +172,7 @@ public record MultiExpression<T> (List<Entry<T>> expressions) implements Simplif
*/
default O getOrElse(WithTags input, O defaultValue) {
List<O> matches = getMatches(input);
return matches.isEmpty() ? defaultValue : matches.get(0);
return matches.isEmpty() ? defaultValue : matches.getFirst();
}
/**
@ -184,7 +180,7 @@ public record MultiExpression<T> (List<Entry<T>> expressions) implements Simplif
*/
default O getOrElse(Map<String, Object> tags, O defaultValue) {
List<O> matches = getMatches(WithTags.from(tags));
return matches.isEmpty() ? defaultValue : matches.get(0);
return matches.isEmpty() ? defaultValue : matches.getFirst();
}
/** Returns true if any expression matches that tags from an input element. */

Wyświetl plik

@ -1,6 +1,7 @@
package com.onthegomap.planetiler.geo;
import com.onthegomap.planetiler.collection.LongLongMap;
import com.onthegomap.planetiler.config.PlanetilerConfig;
import com.onthegomap.planetiler.stats.Stats;
import java.util.ArrayList;
import java.util.List;
@ -51,6 +52,7 @@ public class GeoUtils {
public static final double WORLD_CIRCUMFERENCE_METERS = Math.PI * 2 * WORLD_RADIUS_METERS;
private static final double RADIANS_PER_DEGREE = Math.PI / 180;
private static final double DEGREES_PER_RADIAN = 180 / Math.PI;
private static final double LOG2 = Math.log(2);
/**
* Transform web mercator coordinates where top-left corner of the planet is (0,0) and bottom-right is (1,1) to
* latitude/longitude coordinates.
@ -281,15 +283,15 @@ public class GeoUtils {
}
public static Geometry combineLineStrings(List<LineString> lineStrings) {
return lineStrings.size() == 1 ? lineStrings.get(0) : createMultiLineString(lineStrings);
return lineStrings.size() == 1 ? lineStrings.getFirst() : createMultiLineString(lineStrings);
}
public static Geometry combinePolygons(List<Polygon> polys) {
return polys.size() == 1 ? polys.get(0) : createMultiPolygon(polys);
return polys.size() == 1 ? polys.getFirst() : createMultiPolygon(polys);
}
public static Geometry combinePoints(List<Point> points) {
return points.size() == 1 ? points.get(0) : createMultiPoint(points);
return points.size() == 1 ? points.getFirst() : createMultiPoint(points);
}
/**
@ -383,29 +385,29 @@ public class GeoUtils {
if (lineStrings.isEmpty()) {
throw new GeometryException("polygon_to_linestring_empty", "No line strings");
} else if (lineStrings.size() == 1) {
return lineStrings.get(0);
return lineStrings.getFirst();
} else {
return createMultiLineString(lineStrings);
}
}
private static void getLineStrings(Geometry input, List<LineString> output) throws GeometryException {
if (input instanceof LinearRing linearRing) {
output.add(JTS_FACTORY.createLineString(linearRing.getCoordinateSequence()));
} else if (input instanceof LineString lineString) {
output.add(lineString);
} else if (input instanceof Polygon polygon) {
getLineStrings(polygon.getExteriorRing(), output);
for (int i = 0; i < polygon.getNumInteriorRing(); i++) {
getLineStrings(polygon.getInteriorRingN(i), output);
switch (input) {
case LinearRing linearRing -> output.add(JTS_FACTORY.createLineString(linearRing.getCoordinateSequence()));
case LineString lineString -> output.add(lineString);
case Polygon polygon -> {
getLineStrings(polygon.getExteriorRing(), output);
for (int i = 0; i < polygon.getNumInteriorRing(); i++) {
getLineStrings(polygon.getInteriorRingN(i), output);
}
}
} else if (input instanceof GeometryCollection gc) {
for (int i = 0; i < gc.getNumGeometries(); i++) {
getLineStrings(gc.getGeometryN(i), output);
case GeometryCollection gc -> {
for (int i = 0; i < gc.getNumGeometries(); i++) {
getLineStrings(gc.getGeometryN(i), output);
}
}
} else {
throw new GeometryException("get_line_strings_bad_type",
"unrecognized geometry type: " + input.getGeometryType());
case null, default -> throw new GeometryException("get_line_strings_bad_type",
"unrecognized geometry type: " + (input == null ? "null" : input.getGeometryType()));
}
}
@ -416,7 +418,7 @@ public class GeoUtils {
/** Returns a point approximately {@code ratio} of the way from start to end and {@code offset} units to the right. */
public static Point pointAlongOffset(LineString lineString, double ratio, double offset) {
int numPoints = lineString.getNumPoints();
int middle = Math.max(0, Math.min(numPoints - 2, (int) (numPoints * ratio)));
int middle = Math.clamp((int) (numPoints * ratio), 0, numPoints - 2);
Coordinate a = lineString.getCoordinateN(middle);
Coordinate b = lineString.getCoordinateN(middle + 1);
LineSegment segment = new LineSegment(a, b);
@ -530,10 +532,22 @@ public class GeoUtils {
innerGeometries.add(geom);
}
}
return innerGeometries.size() == 1 ? innerGeometries.get(0) :
return innerGeometries.size() == 1 ? innerGeometries.getFirst() :
JTS_FACTORY.createGeometryCollection(innerGeometries.toArray(Geometry[]::new));
}
/**
* For a feature of size {@code worldGeometrySize} (where 1=full planet), determine the minimum zoom level at which
* the feature appears at least {@code minPixelSize} pixels large.
* <p>
* The result will be clamped to the range [0, {@link PlanetilerConfig#MAX_MAXZOOM}].
*/
public static int minZoomForPixelSize(double worldGeometrySize, double minPixelSize) {
double worldPixels = worldGeometrySize * 256;
return Math.clamp((int) Math.ceil(Math.log(minPixelSize / worldPixels) / LOG2), 0,
PlanetilerConfig.MAX_MAXZOOM);
}
/** Helper class to sort polygons by area of their outer shell. */
private record PolyAndArea(Polygon poly, double area) implements Comparable<PolyAndArea> {

Wyświetl plik

@ -1,6 +1,12 @@
package com.onthegomap.planetiler.geo;
import com.onthegomap.planetiler.stats.Stats;
import java.util.ArrayList;
import java.util.Base64;
import java.util.function.Supplier;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.io.WKBWriter;
import org.locationtech.jts.io.WKTWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -14,6 +20,7 @@ public class GeometryException extends Exception {
private final String stat;
private final boolean nonFatal;
private final ArrayList<Supplier<String>> detailsSuppliers = new ArrayList<>();
/**
* Constructs a new exception with a detailed error message caused by {@code cause}.
@ -51,6 +58,11 @@ public class GeometryException extends Exception {
this.nonFatal = nonFatal;
}
public GeometryException addDetails(Supplier<String> detailsSupplier) {
this.detailsSuppliers.add(detailsSupplier);
return this;
}
/** Returns the unique code for this error condition to use for counting the number of occurrences in stats. */
public String stat() {
return stat;
@ -72,6 +84,38 @@ public class GeometryException extends Exception {
assert nonFatal : log; // make unit tests fail if fatal
}
/** Logs the error but if {@code logDetails} is true, then also prints detailed debugging info. */
public void log(Stats stats, String statPrefix, String logPrefix, boolean logDetails) {
if (logDetails) {
stats.dataError(statPrefix + "_" + stat());
StringBuilder log = new StringBuilder(logPrefix + ": " + getMessage());
for (var details : detailsSuppliers) {
log.append("\n").append(details.get());
}
var str = log.toString();
LOGGER.warn(str, this.getCause() == null ? this : this.getCause());
assert nonFatal : log.toString(); // make unit tests fail if fatal
} else {
log(stats, statPrefix, logPrefix);
}
}
public GeometryException addGeometryDetails(String original, Geometry geometryCollection) {
return addDetails(() -> {
var wktWriter = new WKTWriter();
var wkbWriter = new WKBWriter();
var base64 = Base64.getEncoder();
return """
%s (wkt): %s
%s (wkb): %s
""".formatted(
original, wktWriter.write(geometryCollection),
original, base64.encodeToString(wkbWriter.write(geometryCollection))
).strip();
});
}
/**
* An error that we expect to encounter often so should only be logged at {@code TRACE} level.
*/

Wyświetl plik

@ -19,7 +19,7 @@ import org.locationtech.jts.index.strtree.STRtree;
@ThreadSafe
public class PolygonIndex<T> {
private record GeomWithData<T> (Polygon poly, T data) {}
private record GeomWithData<T>(Polygon poly, T data) {}
private final STRtree index = new STRtree();
@ -45,7 +45,7 @@ public class PolygonIndex<T> {
/** Returns the data associated with the first polygon containing {@code point}. */
public T getOnlyContaining(Point point) {
List<T> result = getContaining(point);
return result.isEmpty() ? null : result.get(0);
return result.isEmpty() ? null : result.getFirst();
}
/** Returns the data associated with all polygons containing {@code point}. */
@ -77,7 +77,7 @@ public class PolygonIndex<T> {
List<?> items = index.query(point.getEnvelopeInternal());
// optimization: if there's only one then skip checking contains/distance
if (items.size() == 1) {
if (items.get(0)instanceof GeomWithData<?> value) {
if (items.getFirst() instanceof GeomWithData<?> value) {
@SuppressWarnings("unchecked") T t = (T) value.data;
return List.of(t);
}
@ -108,7 +108,7 @@ public class PolygonIndex<T> {
/** Returns the data associated with a polygon that contains {@code point} or nearest polygon if none are found. */
public T get(Point point) {
List<T> nearests = getContainingOrNearest(point);
return nearests.isEmpty() ? null : nearests.get(0);
return nearests.isEmpty() ? null : nearests.getFirst();
}
/** Indexes {@code item} for all polygons contained in {@code geom}. */

Wyświetl plik

@ -21,11 +21,11 @@ public class TileExtents implements Predicate<TileCoord> {
}
private static int quantizeDown(double value, int levels) {
return Math.max(0, Math.min(levels, (int) Math.floor(value * levels)));
return Math.clamp((int) Math.floor(value * levels), 0, levels);
}
private static int quantizeUp(double value, int levels) {
return Math.max(0, Math.min(levels, (int) Math.ceil(value * levels)));
return Math.clamp((int) Math.ceil(value * levels), 0, levels);
}
/** Returns a filter to tiles that intersect {@code worldBounds} (specified in world web mercator coordinates). */

Wyświetl plik

@ -220,7 +220,7 @@ public final class Mbtiles implements WriteableTileArchive, ReadableTileArchive
}
@Override
public void initialize(TileArchiveMetadata tileArchiveMetadata) {
public void initialize() {
if (skipIndexCreation) {
createTablesWithoutIndexes();
if (LOGGER.isInfoEnabled()) {
@ -230,12 +230,11 @@ public final class Mbtiles implements WriteableTileArchive, ReadableTileArchive
} else {
createTablesWithIndexes();
}
metadataTable().set(tileArchiveMetadata);
}
@Override
public void finish(TileArchiveMetadata tileArchiveMetadata) {
metadataTable().set(tileArchiveMetadata);
if (vacuumAnalyze) {
vacuumAnalyze();
}

Wyświetl plik

@ -74,7 +74,7 @@ public class Overture implements Profile {
}
private static void downloadFiles(Path base, Planetiler pt, String release, boolean sample) {
var d = Downloader.create(pt.config(), pt.stats());
var d = Downloader.create(pt.config());
var urls = sample ?
OvertureUrls.sampleSmallest(pt.config(), "release/" + release) :
OvertureUrls.getAll(pt.config(), "release/" + release);
@ -149,8 +149,8 @@ public class Overture implements Profile {
String clazz = sourceFeature.getStruct().get("class").asString();
createAnyFeature(sourceFeature, features)
.setMinZoom(sourceFeature.isPoint() ? 14 : switch (clazz) {
case "residential" -> 6;
default -> 9;
case "residential" -> 6;
default -> 9;
})
.inheritAttrFromSource("subType")
.inheritAttrFromSource("class")
@ -254,24 +254,24 @@ public class Overture implements Profile {
}
int minzoom = switch (subtype) {
case ROAD -> switch (roadClass) {
case MOTORWAY -> 4;
case TRUNK -> 5;
case PRIMARY -> 7;
case SECONDARY -> 9;
case TERTIARY -> 11;
case RESIDENTIAL -> 12;
case LIVINGSTREET -> 13;
case UNCLASSIFIED -> 14;
case PARKINGAISLE -> 14;
case DRIVEWAY -> 14;
case PEDESTRIAN -> 14;
case FOOTWAY -> 14;
case STEPS -> 14;
case TRACK -> 14;
case CYCLEWAY -> 14;
case BRIDLEWAY -> 14;
case UNKNOWN -> 14;
};
case MOTORWAY -> 4;
case TRUNK -> 5;
case PRIMARY -> 7;
case SECONDARY -> 9;
case TERTIARY -> 11;
case RESIDENTIAL -> 12;
case LIVINGSTREET -> 13;
case UNCLASSIFIED -> 14;
case PARKINGAISLE -> 14;
case DRIVEWAY -> 14;
case PEDESTRIAN -> 14;
case FOOTWAY -> 14;
case STEPS -> 14;
case TRACK -> 14;
case CYCLEWAY -> 14;
case BRIDLEWAY -> 14;
case UNKNOWN -> 14;
};
case RAIL -> 8;
case WATER -> 10;
};
@ -433,7 +433,7 @@ public class Overture implements Profile {
private static final Range<Double> FULL_LENGTH = Range.closedOpen(0.0, 1.0);
record Partial<T> (T value, Range<Double> at) {}
/** A value that applies over the sub-range {@code at} of a feature (presumably fractions 0-1 of its length, matching {@code FULL_LENGTH} — confirm against callers). */
record Partial<T>(T value, Range<Double> at) {}
private void processConnector(AvroParquetFeature sourceFeature, FeatureCollector features) {
if (connectors) {

Wyświetl plik

@ -153,7 +153,7 @@ public class GeoPackageReader extends SimpleReader<SimpleFeature> {
Geometry latLonGeom = (transform.isIdentity()) ? featureGeom : JTS.transform(featureGeom, transform);
FeatureColumns columns = feature.getColumns();
SimpleFeature geom = SimpleFeature.create(latLonGeom, new HashMap<>(columns.columnCount()),
SimpleFeature geom = SimpleFeature.create(latLonGeom, HashMap.newHashMap(columns.columnCount()),
sourceName, featureName, ++id);
for (int i = 0; i < columns.columnCount(); ++i) {

Wyświetl plik

@ -177,7 +177,7 @@ public class NaturalEarthReader extends SimpleReader<SimpleFeature> {
// create the feature and pass to next stage
Geometry latLonGeometry = GeoUtils.WKB_READER.read(geometry);
SimpleFeature readerGeometry = SimpleFeature.create(latLonGeometry, new HashMap<>(column.length - 1),
SimpleFeature readerGeometry = SimpleFeature.create(latLonGeometry, HashMap.newHashMap(column.length - 1),
sourceName, table, ++id);
for (int c = 0; c < column.length; c++) {
if (c != geometryColumn) {

Wyświetl plik

@ -2,6 +2,7 @@ package com.onthegomap.planetiler.reader;
import com.onthegomap.planetiler.Profile;
import com.onthegomap.planetiler.collection.FeatureGroup;
import com.onthegomap.planetiler.config.Bounds;
import com.onthegomap.planetiler.config.PlanetilerConfig;
import com.onthegomap.planetiler.stats.Stats;
import java.io.IOException;
@ -19,9 +20,13 @@ import org.geotools.api.referencing.operation.MathTransform;
import org.geotools.api.referencing.operation.OperationNotFoundException;
import org.geotools.api.referencing.operation.TransformException;
import org.geotools.data.shapefile.ShapefileDataStore;
import org.geotools.factory.CommonFactoryFinder;
import org.geotools.feature.FeatureCollection;
import org.geotools.geometry.jts.JTS;
import org.geotools.geometry.jts.ReferencedEnvelope;
import org.geotools.referencing.CRS;
import org.geotools.util.factory.GeoTools;
import org.locationtech.jts.geom.Envelope;
import org.locationtech.jts.geom.Geometry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -45,6 +50,10 @@ public class ShapefileReader extends SimpleReader<SimpleFeature> {
private MathTransform transformToLatLon;
public ShapefileReader(String sourceProjection, String sourceName, Path input) {
this(sourceProjection, sourceName, input, Bounds.WORLD);
}
public ShapefileReader(String sourceProjection, String sourceName, Path input, Bounds bounds) {
super(sourceName);
this.layer = input.getFileName().toString().replaceAll("\\.shp$", "");
dataStore = open(input);
@ -52,8 +61,6 @@ public class ShapefileReader extends SimpleReader<SimpleFeature> {
String typeName = dataStore.getTypeNames()[0];
FeatureSource<SimpleFeatureType, org.geotools.api.feature.simple.SimpleFeature> source = dataStore
.getFeatureSource(typeName);
inputSource = source.getFeatures(Filter.INCLUDE);
CoordinateReferenceSystem src =
sourceProjection == null ? source.getSchema().getCoordinateReferenceSystem() : CRS.decode(sourceProjection);
CoordinateReferenceSystem dest = CRS.decode("EPSG:4326", true);
@ -61,6 +68,26 @@ public class ShapefileReader extends SimpleReader<SimpleFeature> {
if (transformToLatLon.isIdentity()) {
transformToLatLon = null;
}
Filter filter = Filter.INCLUDE;
Envelope env = bounds.latLon();
if (!bounds.isWorld()) {
var ff = CommonFactoryFinder.getFilterFactory(GeoTools.getDefaultHints());
var schema = source.getSchema();
String geometryPropertyName = schema.getGeometryDescriptor().getLocalName();
var bbox = new ReferencedEnvelope(env.getMinX(), env.getMaxX(), env.getMinY(), env.getMaxY(), dest);
try {
var bbox2 = bbox.transform(schema.getGeometryDescriptor().getCoordinateReferenceSystem(), true);
filter = ff.bbox(ff.property(geometryPropertyName), bbox2);
} catch (TransformException e) {
// just use include filter
}
}
inputSource = source.getFeatures(filter);
attributeNames = new String[inputSource.getSchema().getAttributeCount()];
for (int i = 0; i < attributeNames.length; i++) {
attributeNames[i] = inputSource.getSchema().getDescriptor(i).getLocalName();
@ -105,7 +132,7 @@ public class ShapefileReader extends SimpleReader<SimpleFeature> {
SourceFeatureProcessor.processFiles(
sourceName,
sourcePaths,
path -> new ShapefileReader(sourceProjection, sourceName, path),
path -> new ShapefileReader(sourceProjection, sourceName, path, config.bounds()),
writer, config, profile, stats
);
}
@ -137,7 +164,7 @@ public class ShapefileReader extends SimpleReader<SimpleFeature> {
latLonGeometry = JTS.transform(source, transformToLatLon);
}
if (latLonGeometry != null) {
SimpleFeature geom = SimpleFeature.create(latLonGeometry, new HashMap<>(attributeNames.length),
SimpleFeature geom = SimpleFeature.create(latLonGeometry, HashMap.newHashMap(attributeNames.length),
sourceName, layer, ++id);
for (int i = 1; i < attributeNames.length; i++) {
geom.setTag(attributeNames[i], feature.getAttribute(i));

Wyświetl plik

@ -1,8 +1,10 @@
package com.onthegomap.planetiler.reader;
import com.onthegomap.planetiler.geo.GeoUtils;
import com.onthegomap.planetiler.reader.osm.OsmElement;
import com.onthegomap.planetiler.reader.osm.OsmReader;
import com.onthegomap.planetiler.reader.osm.OsmRelationInfo;
import com.onthegomap.planetiler.reader.osm.OsmSourceFeature;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@ -76,29 +78,87 @@ public class SimpleFeature extends SourceFeature {
return new SimpleFeature(latLonGeometry, null, tags, null, null, idGenerator.incrementAndGet(), null);
}
/**
 * A {@link SimpleFeature} that also carries OSM element metadata and applies OSM-specific rules (the
 * {@code area} tag) when deciding whether a closed line may be treated as a polygon or a line.
 */
private static class SimpleOsmFeature extends SimpleFeature implements OsmSourceFeature {

  // value of the "area" tag, consulted by OsmReader.canBePolygon/canBeLine for closed ways
  private final String area;
  private final OsmElement.Info info;

  private SimpleOsmFeature(Geometry latLonGeometry, Geometry worldGeometry, Map<String, Object> tags, String source,
    String sourceLayer, long id, List<OsmReader.RelationMember<OsmRelationInfo>> relations, OsmElement.Info info) {
    super(latLonGeometry, worldGeometry, tags, source, sourceLayer, id, relations);
    this.area = (String) tags.get("area");
    this.info = info;
  }

  @Override
  public boolean canBePolygon() {
    return latLonGeometry() instanceof Polygonal || (latLonGeometry() instanceof LineString line &&
      OsmReader.canBePolygon(line.isClosed(), area, latLonGeometry().getNumPoints()));
  }

  @Override
  public boolean canBeLine() {
    return latLonGeometry() instanceof MultiLineString || (latLonGeometry() instanceof LineString line &&
      OsmReader.canBeLine(line.isClosed(), area, latLonGeometry().getNumPoints()));
  }

  @Override
  protected Geometry computePolygon() {
    // a closed way stored as a LineString becomes a polygon; other geometries pass through unchanged
    var geom = worldGeometry();
    return geom instanceof LineString line ? GeoUtils.JTS_FACTORY.createPolygon(line.getCoordinates()) : geom;
  }

  @Override
  public OsmElement originalElement() {
    return new OsmElement() {
      @Override
      public long id() {
        return SimpleOsmFeature.this.id();
      }

      @Override
      public Info info() {
        return info;
      }

      @Override
      public int cost() {
        return 1;
      }

      @Override
      public Map<String, Object> tags() {
        // FIX: must be qualified with the enclosing instance - a bare tags() call here resolves to
        // this anonymous class's own tags() and recurses forever (StackOverflowError on first use)
        return SimpleOsmFeature.this.tags();
      }
    };
  }

  @Override
  public boolean equals(Object o) {
    return this == o || (o instanceof SimpleOsmFeature other && super.equals(other) &&
      Objects.equals(area, other.area) && Objects.equals(info, other.info));
  }

  @Override
  public int hashCode() {
    int result = super.hashCode();
    result = 31 * result + (area != null ? area.hashCode() : 0);
    result = 31 * result + (info != null ? info.hashCode() : 0);
    return result;
  }
}
/** Returns a new feature with OSM relation info. Useful for setting up inputs for OSM unit tests. */
public static SimpleFeature createFakeOsmFeature(Geometry latLonGeometry, Map<String, Object> tags, String source,
String sourceLayer, long id, List<OsmReader.RelationMember<OsmRelationInfo>> relations) {
String area = (String) tags.get("area");
return new SimpleFeature(latLonGeometry, null, tags, source, sourceLayer, id, relations) {
@Override
public boolean canBePolygon() {
return latLonGeometry instanceof Polygonal || (latLonGeometry instanceof LineString line &&
OsmReader.canBePolygon(line.isClosed(), area, latLonGeometry.getNumPoints()));
}
return createFakeOsmFeature(latLonGeometry, tags, source, sourceLayer, id, relations, null);
}
@Override
public boolean canBeLine() {
return latLonGeometry instanceof MultiLineString || (latLonGeometry instanceof LineString line &&
OsmReader.canBeLine(line.isClosed(), area, latLonGeometry.getNumPoints()));
}
@Override
protected Geometry computePolygon() {
var geom = worldGeometry();
return geom instanceof LineString line ? GeoUtils.JTS_FACTORY.createPolygon(line.getCoordinates()) : geom;
}
};
/**
 * Returns a new feature carrying OSM relation info and element metadata. Useful for setting up inputs for OSM unit
 * tests.
 */
public static SimpleFeature createFakeOsmFeature(Geometry latLonGeometry, Map<String, Object> tags, String source,
  String sourceLayer, long id, List<OsmReader.RelationMember<OsmRelationInfo>> relations, OsmElement.Info info) {
  // world geometry is passed as null; the OSM wrapper derives everything from the lat/lon geometry
  SimpleFeature result = new SimpleOsmFeature(latLonGeometry, null, tags, source, sourceLayer, id, relations, info);
  return result;
}
@Override

Wyświetl plik

@ -35,13 +35,15 @@ public abstract class SourceFeature implements WithTags, WithGeometryType {
private final long id;
private Geometry centroid = null;
private Geometry pointOnSurface = null;
private Geometry innermostPoint = null;
private Geometry centroidIfConvex = null;
private double innermostPointTolerance = Double.NaN;
private Geometry innermostPoint = null;
private Geometry linearGeometry = null;
private Geometry polygonGeometry = null;
private Geometry validPolygon = null;
private double area = Double.NaN;
private double length = Double.NaN;
private double size = Double.NaN;
// slight optimization: replace default implementation with direct access to the tags
// map to get slightly improved performance when matching elements against expressions
@ -127,18 +129,30 @@ public abstract class SourceFeature implements WithTags, WithGeometryType {
worldGeometry().getInteriorPoint());
}
public final Geometry innermostPoint() throws GeometryException {
if (innermostPoint == null) {
Geometry polygon = polygon();
innermostPoint = MaximumInscribedCircle.getCenter(polygon(), Math.sqrt(polygon.getArea() / 100d));
/**
* Returns {@link MaximumInscribedCircle#getCenter()} of this geometry in world web mercator coordinates.
*
* @param tolerance precision for calculating maximum inscribed circle. 0.01 means 1% of the square root of the area.
* Smaller values for a more precise tolerance become very expensive to compute. Values between
* 0.05-0.1 are a good compromise of performance vs. precision.
*/
public final Geometry innermostPoint(double tolerance) throws GeometryException {
if (canBePolygon()) {
// cache as long as the tolerance hasn't changed
if (tolerance != innermostPointTolerance || innermostPoint == null) {
innermostPoint = MaximumInscribedCircle.getCenter(polygon(), Math.sqrt(area()) * tolerance);
innermostPointTolerance = tolerance;
}
return innermostPoint;
} else {
return pointOnSurface();
}
return innermostPoint;
}
private Geometry computeCentroidIfConvex() throws GeometryException {
if (!canBePolygon()) {
return centroid();
} else if (polygon()instanceof Polygon poly &&
} else if (polygon() instanceof Polygon poly &&
poly.getNumInteriorRing() == 0 &&
GeoUtils.isConvex(poly.getExteriorRing())) {
return centroid();
@ -255,6 +269,14 @@ public abstract class SourceFeature implements WithTags, WithGeometryType {
(isPoint() || canBePolygon() || canBeLine()) ? worldGeometry().getLength() : 0) : length;
}
/**
 * Returns and caches sqrt of {@link #area()} if polygon or {@link #length()} if a line string.
 *
 * @throws GeometryException if computing the underlying area/length fails
 */
public double size() throws GeometryException {
  if (Double.isNaN(size)) {
    // memoize on first call; NaN marks "not yet computed"
    if (canBePolygon()) {
      size = Math.sqrt(Math.abs(area()));
    } else if (canBeLine()) {
      size = length();
    } else {
      size = 0;
    }
  }
  return size;
}
/** Returns the ID of the source that this feature came from. */
public String getSource() {
return source;
@ -302,4 +324,5 @@ public abstract class SourceFeature implements WithTags, WithGeometryType {
public boolean hasRelationInfo() {
return relationInfos != null && !relationInfos.isEmpty();
}
}

Wyświetl plik

@ -232,7 +232,7 @@ public class OsmMultipolygon {
if (numPolygons == 0) {
return shells;
}
shells.add(polygons.get(0));
shells.add(polygons.getFirst());
if (numPolygons == 1) {
return shells;
}

Wyświetl plik

@ -133,7 +133,7 @@ public class PbfDecoder implements Iterable<OsmElement> {
private Map<String, Object> buildTags(int num, IntUnaryOperator key, IntUnaryOperator value) {
if (num > 0) {
Map<String, Object> tags = new HashMap<>(num);
Map<String, Object> tags = HashMap.newHashMap(num);
for (int i = 0; i < num; i++) {
String k = fieldDecoder.decodeString(key.applyAsInt(i));
String v = fieldDecoder.decodeString(value.applyAsInt(i));
@ -366,7 +366,7 @@ public class PbfDecoder implements Iterable<OsmElement> {
if (tags == null) {
// divide by 2 as key&value, multiply by 2 because of the better approximation
tags = new HashMap<>(Math.max(3, 2 * (nodes.getKeysValsCount() / 2) / nodes.getKeysValsCount()));
tags = HashMap.newHashMap(Math.max(3, 2 * (nodes.getKeysValsCount() / 2) / nodes.getKeysValsCount()));
}
tags.put(fieldDecoder.decodeString(keyIndex), fieldDecoder.decodeString(valueIndex));

Wyświetl plik

@ -97,6 +97,10 @@ public class FeatureRenderer implements Consumer<FeatureCollector.Feature>, Clos
coords[i] = origCoords[i].copy();
}
for (int zoom = feature.getMaxZoom(); zoom >= feature.getMinZoom(); zoom--) {
double minSize = feature.getMinPixelSizeAtZoom(zoom);
if (minSize > 0 && feature.getSourceFeaturePixelSizeAtZoom(zoom) < minSize) {
continue;
}
Map<String, Object> attrs = feature.getAttrsAtZoom(zoom);
double buffer = feature.getBufferPixelsAtZoom(zoom) / 256;
int tilesAtZoom = 1 << zoom;
@ -207,7 +211,7 @@ public class FeatureRenderer implements Consumer<FeatureCollector.Feature>, Clos
}
Map<String, Object> attrs = feature.getAttrsAtZoom(sliced.zoomLevel());
if (numPointsAttr != null) {
// if profile wants the original number of points that the simplified but untiled geometry started with
// if profile wants the original number of points that the simplified but untiled geometry started with
attrs = new HashMap<>(attrs);
attrs.put(numPointsAttr, geom.getNumPoints());
}

Wyświetl plik

@ -122,7 +122,7 @@ class GeometryCoordinateSequences {
static Geometry reassemblePolygons(List<List<CoordinateSequence>> groups) throws GeometryException {
int numGeoms = groups.size();
if (numGeoms == 1) {
return reassemblePolygon(groups.get(0));
return reassemblePolygon(groups.getFirst());
} else {
Polygon[] polygons = new Polygon[numGeoms];
for (int i = 0; i < numGeoms; i++) {
@ -135,7 +135,7 @@ class GeometryCoordinateSequences {
/** Returns a {@link Polygon} built from all outer/inner rings in {@code group}, reversing all inner rings. */
private static Polygon reassemblePolygon(List<CoordinateSequence> group) throws GeometryException {
try {
LinearRing first = GeoUtils.JTS_FACTORY.createLinearRing(group.get(0));
LinearRing first = GeoUtils.JTS_FACTORY.createLinearRing(group.getFirst());
LinearRing[] rest = new LinearRing[group.size() - 1];
for (int j = 1; j < group.size(); j++) {
CoordinateSequence seq = group.get(j);

Wyświetl plik

@ -258,7 +258,7 @@ public class TiledGeometry {
TileCoord tile = TileCoord.ofXYZ(wrappedX, y, z);
double tileY = worldY - y;
tileContents.computeIfAbsent(tile, t -> List.of(new ArrayList<>()))
.get(0)
.getFirst()
.add(GeoUtils.coordinateSequence(tileX * 256, tileY * 256));
}
}
@ -384,7 +384,7 @@ public class TiledGeometry {
for (var entry : inProgressShapes.entrySet()) {
TileCoord tileID = entry.getKey();
List<CoordinateSequence> inSeqs = entry.getValue();
if (area && inSeqs.get(0).size() < 4) {
if (area && inSeqs.getFirst().size() < 4) {
// not enough points in outer polygon, ignore
continue;
}
@ -573,20 +573,20 @@ public class TiledGeometry {
}
/*
A tile is inside a filled region when there is an odd number of vertical edges to the left and right
for example a simple shape:
---------
out | in | out
(0/2) | (1/1) | (2/0)
---------
or a more complex shape
--------- ---------
out | in | out | in |
(0/4) | (1/3) | (2/2) | (3/1) |
| --------- |
-------------------------
So we keep track of this number by xor'ing the left and right fills repeatedly,
then and'ing them together at the end.
*/

Wyświetl plik

@ -38,7 +38,7 @@ public class ProcessInfo {
for (GarbageCollectorMXBean garbageCollectorMXBean : ManagementFactory.getGarbageCollectorMXBeans()) {
if (garbageCollectorMXBean instanceof NotificationEmitter emitter) {
emitter.addNotificationListener((notification, handback) -> {
if (notification.getUserData()instanceof CompositeData compositeData) {
if (notification.getUserData() instanceof CompositeData compositeData) {
var info = GarbageCollectionNotificationInfo.from(compositeData);
GcInfo gcInfo = info.getGcInfo();
postGcMemoryUsage.set(gcInfo.getMemoryUsageAfterGc().entrySet().stream()
@ -142,7 +142,7 @@ public class ProcessInfo {
* Returns the total amount of memory available on the system if available.
*/
public static OptionalLong getSystemMemoryBytes() {
if (ManagementFactory.getOperatingSystemMXBean()instanceof com.sun.management.OperatingSystemMXBean osBean) {
if (ManagementFactory.getOperatingSystemMXBean() instanceof com.sun.management.OperatingSystemMXBean osBean) {
return OptionalLong.of(osBean.getTotalMemorySize());
} else {
return OptionalLong.empty();

Wyświetl plik

@ -10,15 +10,12 @@ import javax.annotation.concurrent.Immutable;
* A utility for measuring the wall and CPU time that this JVM consumes between snapshots.
* <p>
* For example:
*
* <pre>
* {@code
* {@snippet :
* var start = ProcessTime.now();
* // do expensive work...
* var end - ProcessTime.now();
* var end = ProcessTime.now();
* LOGGER.log("Expensive work took " + end.minus(start));
* }
* </pre>
*/
@Immutable
public record ProcessTime(Duration wall, Optional<Duration> cpu, Duration gc) {

Wyświetl plik

@ -275,7 +275,7 @@ public class ProgressLoggers {
/** Adds the CPU utilization of every thread starting with {@code prefix} since the last log to output. */
public ProgressLoggers addThreadPoolStats(String name, String prefix) {
boolean first = loggers.isEmpty() || !(loggers.get(loggers.size() - 1) instanceof WorkerPipelineLogger);
boolean first = loggers.isEmpty() || !(loggers.getLast() instanceof WorkerPipelineLogger);
try {
Map<Long, ProcessInfo.ThreadState> lastThreads = ProcessInfo.getThreadStats();
AtomicLong lastTime = new AtomicLong(System.nanoTime());

Wyświetl plik

@ -76,11 +76,11 @@ public final class WriteableJsonStreamArchive extends WriteableStreamArchive {
}
@Override
public void initialize(TileArchiveMetadata metadata) {
public void initialize() {
if (writeTilesOnly) {
return;
}
writeEntryFlush(new InitializationEntry(metadata));
writeEntryFlush(new InitializationEntry());
}
@Override
@ -204,22 +204,19 @@ public final class WriteableJsonStreamArchive extends WriteableStreamArchive {
}
}
record InitializationEntry(TileArchiveMetadata metadata) implements Entry {}
record InitializationEntry() implements Entry {}
record FinishEntry(TileArchiveMetadata metadata) implements Entry {}
private interface TileArchiveMetadataMixin {
private record TileArchiveMetadataMixin(
@JsonIgnore(false)
Envelope bounds();
@JsonIgnore(false) Envelope bounds,
@JsonIgnore(false)
CoordinateXY center();
@JsonIgnore(false) CoordinateXY center,
@JsonIgnore(false)
List<LayerAttrStats.VectorLayer> vectorLayers();
}
@JsonIgnore(false) List<LayerAttrStats.VectorLayer> vectorLayers
) {}
@JsonIncludeProperties({"minX", "maxX", "minY", "maxY"})
private abstract static class EnvelopeMixin {

Wyświetl plik

@ -50,14 +50,8 @@ public final class WriteableProtoStreamArchive extends WriteableStreamArchive {
}
@Override
public void initialize(TileArchiveMetadata metadata) {
writeEntry(
StreamArchiveProto.Entry.newBuilder()
.setInitialization(
StreamArchiveProto.InitializationEntry.newBuilder().setMetadata(toExportData(metadata)).build()
)
.build()
);
public void initialize() {
writeEntry(StreamArchiveProto.Entry.newBuilder().build());
}
@Override

Wyświetl plik

@ -106,7 +106,7 @@ public class AwsOsm {
} else if (results.size() > 1) {
throw new IllegalArgumentException("Found multiple AWS osm download URLs for " + searchQuery + ": " + results);
}
return results.get(0);
return results.getFirst();
}
}

Wyświetl plik

@ -5,12 +5,12 @@ import static java.nio.file.StandardOpenOption.WRITE;
import com.google.common.util.concurrent.RateLimiter;
import com.onthegomap.planetiler.config.PlanetilerConfig;
import com.onthegomap.planetiler.stats.Counter;
import com.onthegomap.planetiler.stats.ProgressLoggers;
import com.onthegomap.planetiler.stats.Stats;
import com.onthegomap.planetiler.worker.WorkerPipeline;
import com.onthegomap.planetiler.worker.RunnableThatThrows;
import com.onthegomap.planetiler.worker.Worker;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.net.URI;
import java.net.URLConnection;
import java.net.http.HttpClient;
@ -18,9 +18,7 @@ import java.net.http.HttpHeaders;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
@ -30,9 +28,9 @@ import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicLong;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -43,15 +41,12 @@ import org.slf4j.LoggerFactory;
* changes.
* <p>
* For example:
*
* <pre>
* {@code
* {@snippet :
* Downloader.create(PlanetilerConfig.defaults())
* .add("natural_earth", "http://url/of/natural_earth.zip", Path.of("natural_earth.zip"))
* .add("osm", "http://url/of/file.osm.pbf", Path.of("file.osm.pbf"))
* .run();
* }
* </pre>
* <p>
* As a shortcut to find the URL of a file to download from the <a href="https://download.geofabrik.de/">Geofabrik
* download site</a>, you can use "geofabrik:extract name" (i.e. "geofabrik:monaco" or "geofabrik:australia") to look up
@ -69,32 +64,26 @@ public class Downloader {
private static final Logger LOGGER = LoggerFactory.getLogger(Downloader.class);
private final PlanetilerConfig config;
private final List<ResourceToDownload> toDownloadList = new ArrayList<>();
private final HttpClient client = HttpClient.newBuilder()
// explicitly follow redirects to capture final redirect url
.followRedirects(HttpClient.Redirect.NEVER).build();
private final HttpClient client;
private final ExecutorService executor;
private final Stats stats;
private final long chunkSizeBytes;
private final ResourceUsage diskSpaceCheck = new ResourceUsage("download");
private final RateLimiter rateLimiter;
Downloader(PlanetilerConfig config, Stats stats, long chunkSizeBytes) {
Downloader(PlanetilerConfig config, long chunkSizeBytes) {
this.rateLimiter = config.downloadMaxBandwidth() == 0 ? null : RateLimiter.create(config.downloadMaxBandwidth());
this.chunkSizeBytes = chunkSizeBytes;
this.config = config;
this.stats = stats;
this.executor = Executors.newSingleThreadExecutor(runnable -> {
Thread thread = new Thread(() -> {
LogUtil.setStage("download");
runnable.run();
});
thread.setDaemon(true);
return thread;
});
this.executor = Executors.newVirtualThreadPerTaskExecutor();
this.client = HttpClient.newBuilder()
// explicitly follow redirects to capture final redirect url
.followRedirects(HttpClient.Redirect.NEVER)
.executor(executor)
.build();
}
public static Downloader create(PlanetilerConfig config, Stats stats) {
return new Downloader(config, stats, config.downloadChunkSizeMB() * 1_000_000L);
public static Downloader create(PlanetilerConfig config) {
return new Downloader(config, config.downloadChunkSizeMB() * 1_000_000L);
}
public static URLConnection getUrlConnection(String urlString, PlanetilerConfig config) throws IOException {
@ -191,145 +180,117 @@ public class Downloader {
}
CompletableFuture<Void> downloadIfNecessary(ResourceToDownload resourceToDownload) {
long existingSize = FileUtils.size(resourceToDownload.output);
return httpHeadFollowRedirects(resourceToDownload.url, 0)
.whenComplete((metadata, err) -> {
if (metadata != null) {
resourceToDownload.metadata.complete(metadata);
} else {
resourceToDownload.metadata.completeExceptionally(err);
}
})
.thenComposeAsync(metadata -> {
if (metadata.size == existingSize) {
LOGGER.info("Skipping {}: {} already up-to-date", resourceToDownload.id, resourceToDownload.output);
return CompletableFuture.completedFuture(null);
} else {
String redirectInfo = metadata.canonicalUrl.equals(resourceToDownload.url) ? "" :
" (redirected to " + metadata.canonicalUrl + ")";
LOGGER.info("Downloading {}{} to {}", resourceToDownload.url, redirectInfo, resourceToDownload.output);
FileUtils.delete(resourceToDownload.output);
FileUtils.createParentDirectories(resourceToDownload.output);
Path tmpPath = resourceToDownload.tmpPath();
FileUtils.delete(tmpPath);
FileUtils.deleteOnExit(tmpPath);
diskSpaceCheck.addDisk(tmpPath, metadata.size, resourceToDownload.id);
diskSpaceCheck.checkAgainstLimits(config.force(), false);
return httpDownload(resourceToDownload, tmpPath)
.thenCompose(result -> {
try {
Files.move(tmpPath, resourceToDownload.output);
return CompletableFuture.completedFuture(null);
} catch (IOException e) {
return CompletableFuture.<Void>failedFuture(e);
}
})
.whenCompleteAsync((result, error) -> {
if (error != null) {
LOGGER.error("Error downloading {} to {}", resourceToDownload.url, resourceToDownload.output, error);
} else {
LOGGER.info("Finished downloading {} to {}", resourceToDownload.url, resourceToDownload.output);
}
FileUtils.delete(tmpPath);
}, executor);
}
}, executor);
return CompletableFuture.runAsync(RunnableThatThrows.wrap(() -> {
LogUtil.setStage("download", resourceToDownload.id);
long existingSize = FileUtils.size(resourceToDownload.output);
var metadata = httpHeadFollowRedirects(resourceToDownload.url, 0);
Path tmpPath = resourceToDownload.tmpPath();
resourceToDownload.metadata.complete(metadata);
if (metadata.size == existingSize) {
LOGGER.info("Skipping {}: {} already up-to-date", resourceToDownload.id, resourceToDownload.output);
return;
}
try {
String redirectInfo = metadata.canonicalUrl.equals(resourceToDownload.url) ? "" :
" (redirected to " + metadata.canonicalUrl + ")";
LOGGER.info("Downloading {}{} to {}", resourceToDownload.url, redirectInfo, resourceToDownload.output);
FileUtils.delete(resourceToDownload.output);
FileUtils.createParentDirectories(resourceToDownload.output);
FileUtils.delete(tmpPath);
FileUtils.deleteOnExit(tmpPath);
diskSpaceCheck.addDisk(tmpPath, metadata.size, resourceToDownload.id);
diskSpaceCheck.checkAgainstLimits(config.force(), false);
httpDownload(resourceToDownload, tmpPath);
Files.move(tmpPath, resourceToDownload.output);
LOGGER.info("Finished downloading {} to {}", resourceToDownload.url, resourceToDownload.output);
} catch (Exception e) { // NOSONAR
LOGGER.error("Error downloading {} to {}", resourceToDownload.url, resourceToDownload.output, e);
throw e;
} finally {
FileUtils.delete(tmpPath);
}
}), executor);
}
private CompletableFuture<ResourceMetadata> httpHeadFollowRedirects(String url, int redirects) {
private ResourceMetadata httpHeadFollowRedirects(String url, int redirects) throws IOException, InterruptedException {
if (redirects > MAX_REDIRECTS) {
throw new IllegalStateException("Exceeded " + redirects + " redirects for " + url);
}
return httpHead(url).thenComposeAsync(response -> response.redirect.isPresent() ?
httpHeadFollowRedirects(response.redirect.get(), redirects + 1) : CompletableFuture.completedFuture(response));
var response = httpHead(url);
return response.redirect.isPresent() ? httpHeadFollowRedirects(response.redirect.get(), redirects + 1) : response;
}
CompletableFuture<ResourceMetadata> httpHead(String url) {
return client
.sendAsync(newHttpRequest(url).method("HEAD", HttpRequest.BodyPublishers.noBody()).build(),
responseInfo -> {
int status = responseInfo.statusCode();
Optional<String> location = Optional.empty();
long contentLength = 0;
HttpHeaders headers = responseInfo.headers();
if (status >= 300 && status < 400) {
location = responseInfo.headers().firstValue(LOCATION);
if (location.isEmpty()) {
throw new IllegalStateException("Received " + status + " but no location header from " + url);
}
} else if (responseInfo.statusCode() != 200) {
throw new IllegalStateException("Bad response: " + responseInfo.statusCode());
} else {
contentLength = headers.firstValueAsLong(CONTENT_LENGTH).orElseThrow();
ResourceMetadata httpHead(String url) throws IOException, InterruptedException {
return client.send(newHttpRequest(url).HEAD().build(),
responseInfo -> {
int status = responseInfo.statusCode();
Optional<String> location = Optional.empty();
long contentLength = 0;
HttpHeaders headers = responseInfo.headers();
if (status >= 300 && status < 400) {
location = responseInfo.headers().firstValue(LOCATION);
if (location.isEmpty()) {
throw new IllegalStateException("Received " + status + " but no location header from " + url);
}
boolean supportsRangeRequest = headers.allValues(ACCEPT_RANGES).contains("bytes");
ResourceMetadata metadata = new ResourceMetadata(location, url, contentLength, supportsRangeRequest);
return HttpResponse.BodyHandlers.replacing(metadata).apply(responseInfo);
})
.thenApply(HttpResponse::body);
}
private CompletableFuture<?> httpDownload(ResourceToDownload resource, Path tmpPath) {
/*
* Alternative using async HTTP client:
*
* return client.sendAsync(newHttpRequest(url).GET().build(), responseInfo -> {
* assertOK(responseInfo);
* return HttpResponse.BodyHandlers.ofFile(path).apply(responseInfo);
*
* But it is slower on large files
*/
return resource.metadata.thenCompose(metadata -> {
String canonicalUrl = metadata.canonicalUrl;
record Range(long start, long end) {
long size() {
return end - start;
} else if (responseInfo.statusCode() != 200) {
throw new IllegalStateException("Bad response: " + responseInfo.statusCode());
} else {
contentLength = headers.firstValueAsLong(CONTENT_LENGTH).orElseThrow();
}
}
List<Range> chunks = new ArrayList<>();
boolean ranges = metadata.acceptRange && config.downloadThreads() > 1;
long chunkSize = ranges ? chunkSizeBytes : metadata.size;
for (long start = 0; start < metadata.size; start += chunkSize) {
long end = Math.min(start + chunkSize, metadata.size);
chunks.add(new Range(start, end));
}
// create an empty file
try {
Files.createFile(tmpPath);
} catch (IOException e) {
return CompletableFuture.failedFuture(new IOException("Failed to create " + resource.output, e));
}
return WorkerPipeline.start("download-" + resource.id, stats)
.readFromTiny("chunks", chunks)
.sinkToConsumer("chunk-downloader", Math.min(config.downloadThreads(), chunks.size()), range -> {
try (var fileChannel = FileChannel.open(tmpPath, WRITE)) {
while (range.size() > 0) {
try (
var inputStream = (ranges || range.start > 0) ? openStreamRange(canonicalUrl, range.start, range.end) :
openStream(canonicalUrl);
var input = new ProgressChannel(Channels.newChannel(inputStream), resource.progress, rateLimiter)
) {
// ensure this file has been allocated up to the start of this block
fileChannel.write(ByteBuffer.allocate(1), range.start);
fileChannel.position(range.start);
long transferred = fileChannel.transferFrom(input, range.start, range.size());
if (transferred == 0) {
throw new IOException("Transferred 0 bytes but " + range.size() + " expected: " + canonicalUrl);
} else if (transferred != range.size() && !metadata.acceptRange) {
throw new IOException(
"Transferred " + transferred + " bytes but " + range.size() + " expected: " + canonicalUrl +
" and server does not support range requests");
}
range = new Range(range.start + transferred, range.end);
}
}
} catch (IOException e) {
throw new UncheckedIOException(e);
boolean supportsRangeRequest = headers.allValues(ACCEPT_RANGES).contains("bytes");
ResourceMetadata metadata = new ResourceMetadata(location, url, contentLength, supportsRangeRequest);
return HttpResponse.BodyHandlers.replacing(metadata).apply(responseInfo);
}).body();
}
private void httpDownload(ResourceToDownload resource, Path tmpPath)
throws ExecutionException, InterruptedException {
var metadata = resource.metadata().get();
String canonicalUrl = metadata.canonicalUrl();
record Range(long start, long end) {}
List<Range> chunks = new ArrayList<>();
boolean ranges = metadata.acceptRange && config.downloadThreads() > 1;
long chunkSize = ranges ? chunkSizeBytes : metadata.size;
for (long start = 0; start < metadata.size; start += chunkSize) {
long end = Math.min(start + chunkSize, metadata.size);
chunks.add(new Range(start, end));
}
FileUtils.setLength(tmpPath, metadata.size);
Semaphore perFileLimiter = new Semaphore(config.downloadThreads());
Worker.joinFutures(chunks.stream().map(range -> CompletableFuture.runAsync(RunnableThatThrows.wrap(() -> {
LogUtil.setStage("download", resource.id);
perFileLimiter.acquire();
var counter = resource.progress.counterForThread();
try (
var fc = FileChannel.open(tmpPath, WRITE);
var inputStream = (ranges || range.start > 0) ?
openStreamRange(canonicalUrl, range.start, range.end) :
openStream(canonicalUrl);
) {
long offset = range.start;
byte[] buffer = new byte[16384];
int read;
while (offset < range.end && (read = inputStream.read(buffer, 0, 16384)) >= 0) {
counter.incBy(read);
if (rateLimiter != null) {
rateLimiter.acquire(read);
}
}).done();
});
int position = 0;
int remaining = read;
while (remaining > 0) {
int written = fc.write(ByteBuffer.wrap(buffer, position, remaining), offset);
if (written <= 0) {
throw new IOException("Failed to write to " + tmpPath);
}
position += written;
remaining -= written;
offset += written;
}
}
} finally {
perFileLimiter.release();
}
}), executor)).toArray(CompletableFuture[]::new)).get();
}
private HttpRequest.Builder newHttpRequest(String url) {
@ -341,11 +302,12 @@ public class Downloader {
record ResourceMetadata(Optional<String> redirect, String canonicalUrl, long size, boolean acceptRange) {}
record ResourceToDownload(
String id, String url, Path output, CompletableFuture<ResourceMetadata> metadata, AtomicLong progress
String id, String url, Path output, CompletableFuture<ResourceMetadata> metadata,
Counter.MultiThreadCounter progress
) {
ResourceToDownload(String id, String url, Path output) {
this(id, url, output, new CompletableFuture<>(), new AtomicLong(0));
this(id, url, output, new CompletableFuture<>(), Counter.newMultiThreadCounter());
}
public Path tmpPath() {
@ -356,33 +318,4 @@ public class Downloader {
return progress.get();
}
}
/**
* Wrapper for a {@link ReadableByteChannel} that captures progress information.
*/
private record ProgressChannel(ReadableByteChannel inner, AtomicLong progress, RateLimiter rateLimiter)
implements ReadableByteChannel {
@Override
public int read(ByteBuffer dst) throws IOException {
int n = inner.read(dst);
if (n > 0) {
if (rateLimiter != null) {
rateLimiter.acquire(n);
}
progress.addAndGet(n);
}
return n;
}
@Override
public boolean isOpen() {
return inner.isOpen();
}
@Override
public void close() throws IOException {
inner.close();
}
}
}

Wyświetl plik

@ -1,8 +1,13 @@
package com.onthegomap.planetiler.util;
import static java.nio.file.StandardOpenOption.CREATE;
import static java.nio.file.StandardOpenOption.WRITE;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.ClosedFileSystemException;
import java.nio.file.FileStore;
import java.nio.file.FileSystem;
@ -263,7 +268,7 @@ public class FileUtils {
* @throws UncheckedIOException if an IO exception occurs
*/
public static void safeCopy(InputStream inputStream, Path destPath) {
try (var outputStream = Files.newOutputStream(destPath, StandardOpenOption.CREATE, StandardOpenOption.WRITE)) {
try (var outputStream = Files.newOutputStream(destPath, StandardOpenOption.CREATE, WRITE)) {
int totalSize = 0;
int nBytes;
@ -310,7 +315,7 @@ public class FileUtils {
try (
var out = Files.newOutputStream(destination, StandardOpenOption.CREATE_NEW,
StandardOpenOption.WRITE)
WRITE)
) {
totalEntryArchive++;
while ((nBytes = zip.read(buffer)) > 0) {
@ -366,4 +371,16 @@ public class FileUtils {
return true;
}
}
/** Expands the file at {@code path} to {@code size} bytes. */
public static void setLength(Path path, long size) {
try (var fc = FileChannel.open(path, CREATE, WRITE)) {
int written = fc.write(ByteBuffer.allocate(1), size - 1);
if (written != 1) {
throw new IOException("Unable to expand " + path + " to " + size);
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
}

Wyświetl plik

@ -106,7 +106,7 @@ public class Geofabrik {
"Multiple " + name + " for '" + searchQuery + "': " + values.stream().map(d -> d.id).collect(
Collectors.joining(", ")));
} else if (values.size() == 1) {
return values.get(0).urls.get("pbf");
return values.getFirst().urls.get("pbf");
} else {
return null;
}

Wyświetl plik

@ -3,7 +3,6 @@ package com.onthegomap.planetiler.util;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.onthegomap.planetiler.archive.WriteableTileArchive;
import com.onthegomap.planetiler.mbtiles.Mbtiles;
import com.onthegomap.planetiler.render.RenderedFeature;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -11,7 +10,6 @@ import java.util.Optional;
import java.util.OptionalInt;
import java.util.TreeMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.function.Consumer;
import javax.annotation.concurrent.NotThreadSafe;
import javax.annotation.concurrent.ThreadSafe;
@ -27,7 +25,7 @@ import javax.annotation.concurrent.ThreadSafe;
* @see <a href="https://github.com/mapbox/mbtiles-spec/blob/master/1.3/spec.md#content">MBtiles spec</a>
*/
@ThreadSafe
public class LayerAttrStats implements Consumer<RenderedFeature> {
public class LayerAttrStats {
/*
* This utility is called for billions of features by multiple threads when processing the planet which can make
* access to shared data structures a bottleneck. So give each thread an individual ThreadLocalLayerStatsHandler to
@ -63,6 +61,11 @@ public class LayerAttrStats implements Consumer<RenderedFeature> {
.toList();
}
/** Shortcut for tests */
void accept(String layer, int zoom, String key, Object value) {
handlerForThread().forZoom(zoom).forLayer(layer).accept(key, value);
}
public enum FieldType {
@JsonProperty("Number")
NUMBER,
@ -114,7 +117,7 @@ public class LayerAttrStats implements Consumer<RenderedFeature> {
/** Accepts features from a single thread that will be combined across all threads in {@link #getTileStats()}. */
@NotThreadSafe
private class ThreadLocalHandler implements Consumer<RenderedFeature> {
private class ThreadLocalHandler implements Updater {
private final Map<String, StatsForLayer> layers = new TreeMap<>();
@ -123,42 +126,50 @@ public class LayerAttrStats implements Consumer<RenderedFeature> {
}
@Override
public void accept(RenderedFeature feature) {
var vectorTileFeature = feature.vectorTileFeature();
var stats = layers.computeIfAbsent(vectorTileFeature.layer(), StatsForLayer::new);
stats.expandZoomRangeToInclude(feature.tile().z());
for (var entry : vectorTileFeature.attrs().entrySet()) {
String key = entry.getKey();
Object value = entry.getValue();
FieldType fieldType = null;
if (value instanceof Number) {
fieldType = FieldType.NUMBER;
} else if (value instanceof Boolean) {
fieldType = FieldType.BOOLEAN;
} else if (value != null) {
fieldType = FieldType.STRING;
}
if (fieldType != null) {
// widen different types to string
stats.fields.merge(key, fieldType, FieldType::merge);
}
}
public Updater.ForZoom forZoom(int zoom) {
return layer -> {
var stats = layers.computeIfAbsent(layer, StatsForLayer::new);
stats.expandZoomRangeToInclude(zoom);
return (key, value) -> {
FieldType fieldType = null;
if (value instanceof Number) {
fieldType = FieldType.NUMBER;
} else if (value instanceof Boolean) {
fieldType = FieldType.BOOLEAN;
} else if (value != null) {
fieldType = FieldType.STRING;
}
if (fieldType != null) {
// widen different types to string
stats.fields.merge(key, fieldType, FieldType::merge);
}
};
};
}
}
/**
* Returns a handler optimized for accepting features from a single thread.
* <p>
* Use this instead of {@link #accept(RenderedFeature)}
*/
public Consumer<RenderedFeature> handlerForThread() {
public Updater handlerForThread() {
return layerStats.get();
}
@Override
public void accept(RenderedFeature feature) {
handlerForThread().accept(feature);
public interface Updater {
ForZoom forZoom(int zoom);
interface ForZoom {
ForZoom NOOP = layer -> (key, value) -> {
};
ForLayer forLayer(String layer);
interface ForLayer {
void accept(String key, Object value);
}
}
}
private static class StatsForLayer {

Wyświetl plik

@ -14,7 +14,7 @@ public class LogUtil {
/** Prepends {@code [stage]} to all subsequent logs from this thread. */
public static void setStage(String stage) {
MDC.put(STAGE_KEY, stage);
MDC.put(STAGE_KEY, "[%s] ".formatted(stage));
}
/** Removes {@code [stage]} from subsequent logs from this thread. */
@ -24,7 +24,8 @@ public class LogUtil {
/** Returns the current {@code [stage]} value prepended to log for this thread. */
public static String getStage() {
return MDC.get(STAGE_KEY);
// strip out the "[stage] " wrapper
return MDC.get(STAGE_KEY) instanceof String s ? s.substring(1, s.length() - 2) : null;
}
/** Prepends {@code [parent:child]} to all subsequent logs from this thread. */

Wyświetl plik

@ -13,19 +13,15 @@ import com.onthegomap.planetiler.collection.FeatureGroup;
* To sort by a field descending, specify its range from high to low.
* <p>
* For example this SQL ordering:
*
* <pre>
* {@code
*
* {@snippet lang = "sql" :
* ORDER BY rank ASC,
* population DESC,
* length(name) ASC
* }
* </pre>
* <p>
* would become:
*
* <pre>
* {@code
* {@snippet :
* feature.setSortKey(
* SortKey
* .orderByInt(rank, MIN_RANK, MAX_RANK)
@ -125,7 +121,7 @@ public class SortKey {
}
int levels = end + 1 - start;
if (value < start || value > end) {
value = Math.max(start, Math.min(end, value));
value = Math.clamp(value, start, end);
}
return accumulate(value, start, levels);
}
@ -141,7 +137,7 @@ public class SortKey {
return thenByDouble(start - value, end, start, levels);
}
if (value < start || value > end) {
value = Math.max(start, Math.min(end, value));
value = Math.clamp(value, start, end);
}
int intVal = doubleRangeToInt(value, start, end, levels);
@ -160,7 +156,7 @@ public class SortKey {
}
assert start > 0 : "log thresholds must be > 0 got [" + start + ", " + end + "]";
if (value < start || value > end) {
value = Math.max(start, Math.min(end, value));
value = Math.clamp(value, start, end);
}
int intVal = doubleRangeToInt(Math.log(value), Math.log(start), Math.log(end), levels);

Wyświetl plik

@ -62,7 +62,7 @@ public class TopOsmTiles {
TopOsmTiles(PlanetilerConfig config, Stats stats) {
this.config = config;
this.stats = stats;
downloader = Downloader.create(config, stats);
downloader = Downloader.create(config);
}
Reader fetch(LocalDate date) throws IOException {

Wyświetl plik

@ -128,7 +128,7 @@ public class Translations {
Map<String, String> result = new HashMap<>();
for (var entry : tags.entrySet()) {
String key = entry.getKey();
if (key.startsWith("name:") && entry.getValue()instanceof String stringVal) {
if (key.startsWith("name:") && entry.getValue() instanceof String stringVal) {
result.put(key, stringVal);
}
}

Wyświetl plik

@ -18,4 +18,8 @@ public interface RunnableThatThrows {
throwFatalException(e);
}
}
static Runnable wrap(RunnableThatThrows thrower) {
return thrower::runAndWrapException;
}
}

Wyświetl plik

@ -17,24 +17,21 @@ import java.util.function.Consumer;
* A mini-framework for chaining sequential steps that run in dedicated threads with a queue between each.
* <p>
* For example:
*
* <pre>
* {@code
* {@snippet :
* WorkerPipeline.start("name", stats)
* .readFrom("reader", List.of(1, 2, 3))
* .addBuffer("reader_queue", 10)
* .addWorker("process", 2, (i, next) -> next.accept(doExpensiveWork(i))
* .addWorker("process", 2, (i, next) -> next.accept(doExpensiveWork(i)))
* .addBuffer("writer_queue", 10)
* .sinkToConsumer("writer", 1, result -> writeToDisk(result))
* .await();
* }
* </pre>
* <p>
* NOTE: to do any forking/joining, you must construct and wire-up queues and each sequence of steps manually.
*
* @param <T> input type of this pipeline
*/
public record WorkerPipeline<T> (
public record WorkerPipeline<T>(
String name,
WorkerPipeline<?> previous,
WorkQueue<T> inputQueue,
@ -219,7 +216,7 @@ public record WorkerPipeline<T> (
*
* @param <O> type of elements that the next step must process
*/
public record Builder<O> (
public record Builder<O>(
String prefix,
String name,
// keep track of previous elements so that build can wire-up the computation graph

Wyświetl plik

@ -1,4 +1,3 @@
syntax = "proto3";
package com.onthegomap.planetiler.proto;
@ -19,7 +18,6 @@ message TileEntry {
}
message InitializationEntry {
Metadata metadata = 1;
}
message FinishEntry {

Wyświetl plik

@ -2,7 +2,7 @@ appenders=console
appender.console.type=Console
appender.console.name=STDOUT
appender.console.layout.type=PatternLayout
appender.console.layout.pattern=%highlight{$${uptime:now} %level{length=3} %notEmpty{[%X{stage}] }- %msg%n%throwable}{FATAL=red, ERROR=red, WARN=YELLOW, INFO=normal, DEBUG=normal, TRACE=normal}
appender.console.layout.pattern=%highlight{$${uptime:now} %level{length=3} %X{stage}- %msg%n%throwable}{FATAL=red, ERROR=red, WARN=YELLOW, INFO=normal, DEBUG=normal, TRACE=normal}
packages=com.onthegomap.planetiler.util.log4j
rootLogger.level=debug
rootLogger.appenderRefs=stdout

Wyświetl plik

@ -493,6 +493,34 @@ class FeatureCollectorTest {
assertFalse(iter.hasNext());
}
@Test
void testInnermostPoint() {
/*
_____
| · __|
|__|
*/
var sourceLine = newReaderFeature(newPolygon(worldToLatLon(
0, 0,
1, 0,
1, 0.5,
0.5, 0.5,
0.5, 1,
0, 1,
0, 0
)), Map.of());
var fc = factory.get(sourceLine);
fc.innermostPoint("layer").setZoomRange(0, 10);
var iter = fc.iterator();
var item = iter.next();
assertEquals(GeometryType.POINT, item.getGeometryType());
assertEquals(round(newPoint(0.28, 0.28)), round(item.getGeometry(), 1e2));
assertFalse(iter.hasNext());
}
@Test
void testMultiPolygonCoercion() throws GeometryException {
var sourceLine = newReaderFeature(newMultiPolygon(
@ -614,5 +642,4 @@ class FeatureCollectorTest {
)
), collector);
}
}

Wyświetl plik

@ -10,7 +10,9 @@ import com.onthegomap.planetiler.collection.Hppc;
import com.onthegomap.planetiler.geo.GeometryException;
import com.onthegomap.planetiler.geo.GeometryType;
import com.onthegomap.planetiler.mbtiles.Mbtiles;
import com.onthegomap.planetiler.stats.Stats;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
@ -20,11 +22,14 @@ import java.util.function.UnaryOperator;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import org.junit.jupiter.params.provider.ValueSource;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.GeometryCollection;
import org.locationtech.jts.geom.LineString;
import org.locationtech.jts.geom.Point;
import org.locationtech.jts.geom.Polygon;
import org.locationtech.jts.io.ParseException;
import org.locationtech.jts.io.WKBReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -853,4 +858,27 @@ class FeatureMergeTest {
)
);
}
@ParameterizedTest
@ValueSource(strings = {
"/issue_700/exception_1.wkb",
"/issue_700/exception_2.wkb",
"/issue_700/exception_3.wkb",
"/issue_700/exception_4.wkb",
"/issue_700/exception_5.wkb",
"/issue_700/exception_6.wkb",
"/issue_700/exception_7.wkb",
"/issue_700/exception_8.wkb",
"/issue_700/exception_9.wkb",
})
void testIssue700BufferUnionUnbufferFailure(String path) throws IOException, ParseException {
try (var is = getClass().getResource(path).openStream()) {
GeometryCollection collection = (GeometryCollection) new WKBReader().read(is.readAllBytes());
List<Geometry> geometries = new ArrayList<>();
for (int i = 0; i < collection.getNumGeometries(); i++) {
geometries.add(collection.getGeometryN(i));
}
FeatureMerge.bufferUnionUnbuffer(0.5, geometries, Stats.inMemory());
}
}
}

Wyświetl plik

@ -87,6 +87,8 @@ class PlanetilerTests {
private static final double Z13_WIDTH = 1d / Z13_TILES;
private static final int Z12_TILES = 1 << 12;
private static final double Z12_WIDTH = 1d / Z12_TILES;
private static final int Z11_TILES = 1 << 11;
private static final double Z11_WIDTH = 1d / Z11_TILES;
private static final int Z4_TILES = 1 << 4;
private static final Polygon WORLD_POLYGON = newPolygon(
worldCoordinateList(
@ -592,6 +594,15 @@ class PlanetilerTests {
return points;
}
public List<Coordinate> z14PixelRectangle(double min, double max) {
List<Coordinate> points = rectangleCoordList(min / 256d, max / 256d);
points.forEach(c -> {
c.x = GeoUtils.getWorldLon(0.5 + c.x * Z14_WIDTH);
c.y = GeoUtils.getWorldLat(0.5 + c.y * Z14_WIDTH);
});
return points;
}
public List<Coordinate> z14CoordinatePixelList(double... coords) {
return z14CoordinateList(DoubleStream.of(coords).map(c -> c / 256d).toArray());
}
@ -827,7 +838,7 @@ class PlanetilerTests {
var tileContents = results.tiles.get(TileCoord.ofXYZ(0, 0, 0));
assertEquals(1, tileContents.size());
Geometry geom = tileContents.get(0).geometry().geom();
Geometry geom = tileContents.getFirst().geometry().geom();
assertTrue(geom instanceof MultiPolygon, geom.toString());
MultiPolygon multiPolygon = (MultiPolygon) geom;
assertSameNormalizedFeature(newPolygon(
@ -1884,7 +1895,7 @@ class PlanetilerTests {
var point = newPoint(tileX, tileY);
assertEquals(1, problematicTile.size());
var geomCompare = problematicTile.get(0).geometry();
var geomCompare = problematicTile.getFirst().geometry();
geomCompare.validate();
var geom = geomCompare.geom();
@ -2341,6 +2352,158 @@ class PlanetilerTests {
assertEquals(bboxResult.tiles, polyResult.tiles);
}
@Test
void testSimplePolygon() throws Exception {
List<Coordinate> points = z14PixelRectangle(0, 40);
var results = runWithReaderFeatures(
Map.of("threads", "1"),
List.of(
newReaderFeature(newPolygon(points), Map.of())
),
(in, features) -> features.polygon("layer")
.setZoomRange(0, 14)
.setBufferPixels(0)
.setMinPixelSize(10) // should only show up z14 (40) z13 (20) and z12 (10)
);
assertEquals(Map.ofEntries(
newTileEntry(Z12_TILES / 2, Z12_TILES / 2, 12, List.of(
feature(newPolygon(rectangleCoordList(0, 10)), Map.of())
)),
newTileEntry(Z13_TILES / 2, Z13_TILES / 2, 13, List.of(
feature(newPolygon(rectangleCoordList(0, 20)), Map.of())
)),
newTileEntry(Z14_TILES / 2, Z14_TILES / 2, 14, List.of(
feature(newPolygon(rectangleCoordList(0, 40)), Map.of())
))
), results.tiles);
}
@Test
void testCentroidWithPolygonMinSize() throws Exception {
List<Coordinate> points = z14PixelRectangle(0, 40);
var results = runWithReaderFeatures(
Map.of("threads", "1"),
List.of(
newReaderFeature(newPolygon(points), Map.of())
),
(in, features) -> features.centroid("layer")
.setZoomRange(0, 14)
.setBufferPixels(0)
.setMinPixelSize(10) // should only show up z14 (40) z13 (20) and z12 (10)
);
assertEquals(Map.ofEntries(
newTileEntry(Z12_TILES / 2, Z12_TILES / 2, 12, List.of(
feature(newPoint(5, 5), Map.of())
)),
newTileEntry(Z13_TILES / 2, Z13_TILES / 2, 13, List.of(
feature(newPoint(10, 10), Map.of())
)),
newTileEntry(Z14_TILES / 2, Z14_TILES / 2, 14, List.of(
feature(newPoint(20, 20), Map.of())
))
), results.tiles);
}
@Test
void testCentroidWithLineMinSize() throws Exception {
List<Coordinate> points = z14CoordinatePixelList(0, 4, 40, 4);
var results = runWithReaderFeatures(
Map.of("threads", "1"),
List.of(
newReaderFeature(newLineString(points), Map.of())
),
(in, features) -> features.centroid("layer")
.setZoomRange(0, 14)
.setBufferPixels(0)
.setMinPixelSize(10) // should only show up z14 (40) z13 (20) and z12 (10)
);
assertEquals(Map.ofEntries(
newTileEntry(Z12_TILES / 2, Z12_TILES / 2, 12, List.of(
feature(newPoint(5, 1), Map.of())
)),
newTileEntry(Z13_TILES / 2, Z13_TILES / 2, 13, List.of(
feature(newPoint(10, 2), Map.of())
)),
newTileEntry(Z14_TILES / 2, Z14_TILES / 2, 14, List.of(
feature(newPoint(20, 4), Map.of())
))
), results.tiles);
}
@Test
void testAttributeMinSizeLine() throws Exception {
List<Coordinate> points = z14CoordinatePixelList(0, 4, 40, 4);
var results = runWithReaderFeatures(
Map.of("threads", "1"),
List.of(
newReaderFeature(newLineString(points), Map.of())
),
(in, features) -> features.line("layer")
.setZoomRange(11, 14)
.setBufferPixels(0)
.setAttrWithMinSize("a", "1", 10)
.setAttrWithMinSize("b", "2", 20)
.setAttrWithMinSize("c", "3", 40)
.setAttrWithMinSize("d", "4", 40, 0, 13) // should show up at z13 and above
);
assertEquals(Map.ofEntries(
newTileEntry(Z11_TILES / 2, Z11_TILES / 2, 11, List.of(
feature(newLineString(0, 0.5, 5, 0.5), Map.of())
)),
newTileEntry(Z12_TILES / 2, Z12_TILES / 2, 12, List.of(
feature(newLineString(0, 1, 10, 1), Map.of("a", "1"))
)),
newTileEntry(Z13_TILES / 2, Z13_TILES / 2, 13, List.of(
feature(newLineString(0, 2, 20, 2), Map.of("a", "1", "b", "2", "d", "4"))
)),
newTileEntry(Z14_TILES / 2, Z14_TILES / 2, 14, List.of(
feature(newLineString(0, 4, 40, 4), Map.of("a", "1", "b", "2", "c", "3", "d", "4"))
))
), results.tiles);
}
@Test
void testAttributeMinSizePoint() throws Exception {
List<Coordinate> points = z14CoordinatePixelList(0, 4, 40, 4);
var results = runWithReaderFeatures(
Map.of("threads", "1"),
List.of(
newReaderFeature(newLineString(points), Map.of())
),
(in, features) -> features.centroid("layer")
.setZoomRange(11, 14)
.setBufferPixels(0)
.setAttrWithMinSize("a", "1", 10)
.setAttrWithMinSize("b", "2", 20)
.setAttrWithMinSize("c", "3", 40)
.setAttrWithMinSize("d", "4", 40, 0, 13) // should show up at z13 and above
);
assertEquals(Map.ofEntries(
newTileEntry(Z11_TILES / 2, Z11_TILES / 2, 11, List.of(
feature(newPoint(2.5, 0.5), Map.of())
)),
newTileEntry(Z12_TILES / 2, Z12_TILES / 2, 12, List.of(
feature(newPoint(5, 1), Map.of("a", "1"))
)),
newTileEntry(Z13_TILES / 2, Z13_TILES / 2, 13, List.of(
feature(newPoint(10, 2), Map.of("a", "1", "b", "2", "d", "4"))
)),
newTileEntry(Z14_TILES / 2, Z14_TILES / 2, 14, List.of(
feature(newPoint(20, 4), Map.of("a", "1", "b", "2", "c", "3", "d", "4"))
))
), results.tiles);
}
@Test
void testBoundFiltersFill() throws Exception {
var polyResultz8 = runForBoundsTest(8, 8, "polygon", TestUtils.pathToResource("bottomrightearth.poly").toString());

Wyświetl plik

@ -160,7 +160,7 @@ class VectorTileTest {
List<VectorTile.Feature> decoded = VectorTile.decode(encoded);
assertEquals(1, decoded.size());
Map<String, Object> decodedAttributes = decoded.get(0).attrs();
Map<String, Object> decodedAttributes = decoded.getFirst().attrs();
assertEquals("value1", decodedAttributes.get("key1"));
assertEquals(123L, decodedAttributes.get("key2"));
assertEquals(234.1f, decodedAttributes.get("key3"));
@ -220,7 +220,7 @@ class VectorTileTest {
var features = VectorTile.decode(encoded);
assertEquals(1, features.size());
MultiPolygon mp2 = (MultiPolygon) decodeSilently(features.get(0).geometry());
MultiPolygon mp2 = (MultiPolygon) decodeSilently(features.getFirst().geometry());
assertEquals(mp.getNumGeometries(), mp2.getNumGeometries());
}

Wyświetl plik

@ -8,7 +8,6 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
import com.onthegomap.planetiler.stats.Stats;
import java.util.List;
import org.geotools.geometry.jts.WKTReader2;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
@ -18,6 +17,7 @@ import org.locationtech.jts.geom.LinearRing;
import org.locationtech.jts.geom.Point;
import org.locationtech.jts.geom.util.AffineTransformation;
import org.locationtech.jts.io.ParseException;
import org.locationtech.jts.io.WKTReader;
class GeoUtilsTest {
@ -367,7 +367,7 @@ class GeoUtilsTest {
@Test
void testSnapAndFixIssue511() throws ParseException, GeometryException {
var result = GeoUtils.snapAndFixPolygon(new WKTReader2().read(
var result = GeoUtils.snapAndFixPolygon(new WKTReader().read(
"""
MULTIPOLYGON (((198.83750000000003 46.07500000000004, 199.0625 46.375, 199.4375 46.0625, 199.5 46.43750000000001, 199.5625 46, 199.3125 45.5, 198.8912037037037 46.101851851851876, 198.83750000000003 46.07500000000004)), ((198.43750000000003 46.49999999999999, 198.5625 46.43750000000001, 198.6875 46.25, 198.1875 46.25, 198.43750000000003 46.49999999999999)), ((198.6875 46.25, 198.81249999999997 46.062500000000014, 198.6875 46.00000000000002, 198.6875 46.25)), ((196.55199579831933 46.29359243697479, 196.52255639097743 46.941259398496236, 196.5225563909774 46.941259398496236, 196.49999999999997 47.43750000000001, 196.875 47.125, 197 47.5625, 197.47880544905414 46.97729334004497, 197.51505401161464 46.998359569801956, 197.25 47.6875, 198.0625 47.6875, 198.5 46.625, 198.34375 46.546875, 198.34375000000003 46.54687499999999, 197.875 46.3125, 197.875 46.25, 197.875 46.0625, 197.82894736842107 46.20065789473683, 197.25 46.56250000000001, 197.3125 46.125, 196.9375 46.1875, 196.9375 46.21527777777778, 196.73250000000002 46.26083333333334, 196.5625 46.0625, 196.55199579831933 46.29359243697479)), ((196.35213414634146 45.8170731707317, 197.3402027027027 45.93108108108108, 197.875 45.99278846153846, 197.875 45.93750000000002, 197.93749999999997 45.99999999999999, 197.9375 46, 197.90625 45.96874999999999, 197.90625 45.96875, 196.75000000000006 44.81250000000007, 197.1875 45.4375, 196.3125 45.8125, 196.35213414634146 45.8170731707317)), ((195.875 46.124999999999986, 195.8125 46.5625, 196.5 46.31250000000001, 195.9375 46.4375, 195.875 46.124999999999986)), ((196.49999999999997 46.93749999999999, 196.125 46.875, 196.3125 47.125, 196.49999999999997 46.93749999999999)))
"""),
@ -377,7 +377,7 @@ class GeoUtilsTest {
@Test
void testSnapAndFixIssue546() throws GeometryException, ParseException {
var orig = new WKTReader2().read(
var orig = new WKTReader().read(
"""
POLYGON(
(
@ -404,7 +404,7 @@ class GeoUtilsTest {
@Test
void testSnapAndFixIssue546_2() throws GeometryException, ParseException {
var orig = new WKTReader2().read(
var orig = new WKTReader().read(
"""
POLYGON(
(
@ -423,7 +423,7 @@ class GeoUtilsTest {
@Test
void testSnapAndFixIssue546_3() throws GeometryException, ParseException {
var orig = new WKTReader2().read(
var orig = new WKTReader().read(
"""
POLYGON(
(
@ -447,4 +447,30 @@ class GeoUtilsTest {
assertTrue(result.isValid());
assertFalse(result.contains(point));
}
@ParameterizedTest
@CsvSource({
"1,0,0",
"1,10,0",
"1,255,0",
"0.5,0,0",
"0.5,128,0",
"0.5,129,1",
"0.5,256,1",
"0.25,0,0",
"0.25,128,1",
"0.25,129,2",
"0.25,256,2",
})
void minZoomForPixelSize(double worldGeometrySize, double minPixelSize, int expectedMinZoom) {
assertEquals(expectedMinZoom, GeoUtils.minZoomForPixelSize(worldGeometrySize, minPixelSize));
}
@Test
void minZoomForPixelSizesAtZ9_10() {
assertEquals(10, GeoUtils.minZoomForPixelSize(3.1 / (256 << 10), 3));
assertEquals(9, GeoUtils.minZoomForPixelSize(6.1 / (256 << 10), 3));
}
}

Wyświetl plik

@ -188,7 +188,7 @@ class PmtilesTest {
var config = PlanetilerConfig.defaults();
var metadata = new TileArchiveMetadata(new Profile.NullProfile(), config);
in.initialize(metadata);
in.initialize();
var writer = in.newTileWriter();
writer.write(new TileEncodingResult(TileCoord.ofXYZ(0, 0, 1), new byte[]{0xa, 0x2}, OptionalLong.empty()));
@ -259,7 +259,7 @@ class PmtilesTest {
var channel = new SeekableInMemoryByteChannel(0);
var in = WriteablePmtiles.newWriteToMemory(channel)
) {
in.initialize(input);
in.initialize();
var writer = in.newTileWriter();
writer.write(new TileEncodingResult(TileCoord.ofXYZ(0, 0, 0), new byte[]{0xa, 0x2}, OptionalLong.empty()));
@ -299,7 +299,7 @@ class PmtilesTest {
var config = PlanetilerConfig.defaults();
var metadata = new TileArchiveMetadata(new Profile.NullProfile(), config);
in.initialize(metadata);
in.initialize();
var writer = in.newTileWriter();
writer.write(new TileEncodingResult(TileCoord.ofXYZ(0, 0, 0), new byte[]{0xa, 0x2}, OptionalLong.of(42)));
writer.write(new TileEncodingResult(TileCoord.ofXYZ(0, 0, 1), new byte[]{0xa, 0x2}, OptionalLong.of(42)));
@ -337,7 +337,7 @@ class PmtilesTest {
var config = PlanetilerConfig.defaults();
var metadata = new TileArchiveMetadata(new Profile.NullProfile(), config);
in.initialize(metadata);
in.initialize();
var writer = in.newTileWriter();
writer.write(new TileEncodingResult(TileCoord.ofXYZ(0, 0, 1), new byte[]{0xa, 0x2}, OptionalLong.of(42)));
writer.write(new TileEncodingResult(TileCoord.ofXYZ(0, 0, 0), new byte[]{0xa, 0x2}, OptionalLong.of(42)));
@ -372,7 +372,7 @@ class PmtilesTest {
var config = PlanetilerConfig.defaults();
var metadata = new TileArchiveMetadata(new Profile.NullProfile(), config);
in.initialize(metadata);
in.initialize();
var writer = in.newTileWriter();
int ENTRIES = 20000;

Wyświetl plik

@ -1,9 +1,11 @@
package com.onthegomap.planetiler.reader;
import static com.onthegomap.planetiler.TestUtils.newPoint;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import com.onthegomap.planetiler.TestUtils;
import com.onthegomap.planetiler.config.Bounds;
import com.onthegomap.planetiler.geo.GeoUtils;
import com.onthegomap.planetiler.stats.Stats;
import com.onthegomap.planetiler.util.FileUtils;
@ -11,9 +13,9 @@ import com.onthegomap.planetiler.worker.WorkerPipeline;
import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import org.geotools.api.data.SimpleFeatureStore;
import org.geotools.api.referencing.FactoryException;
import org.geotools.api.referencing.operation.TransformException;
@ -29,12 +31,18 @@ import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.condition.DisabledOnOs;
import org.junit.jupiter.api.condition.OS;
import org.junit.jupiter.api.io.TempDir;
import org.locationtech.jts.geom.Envelope;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.Point;
class ShapefileReaderTest {
@TempDir
private Path tempDir;
private static final Envelope env = newPoint(-77.12911152370515, 38.79930767201779).getEnvelopeInternal();
private static final int numInEnv = 18;
static {
env.expandBy(0.1);
}
@Test
@Timeout(30)
@ -55,6 +63,35 @@ class ShapefileReaderTest {
testReadShapefile(dest.resolve("shapefile").resolve("stations.shp"));
}
@Test
@Timeout(30)
void testReadShapefileWithBoundingBox() {
var dest = tempDir.resolve("shapefile.zip");
FileUtils.unzipResource("/shapefile.zip", dest);
try (
var reader = new ShapefileReader(null, "test", dest.resolve("shapefile").resolve("stations.shp"), new Bounds(env))
) {
for (int i = 1; i <= 2; i++) {
assertEquals(numInEnv, reader.getFeatureCount());
List<Geometry> points = new CopyOnWriteArrayList<>();
WorkerPipeline.start("test", Stats.inMemory())
.fromGenerator("source", reader::readFeatures)
.addBuffer("reader_queue", 100, 1)
.sinkToConsumer("counter", 1, elem -> {
assertTrue(elem.getTag("name") instanceof String);
assertEquals("test", elem.getSource());
assertEquals("stations", elem.getSourceLayer());
points.add(elem.latLonGeometry());
}).await();
assertEquals(numInEnv, points.size());
var gc = GeoUtils.JTS_FACTORY.createGeometryCollection(points.toArray(new Geometry[0]));
var centroid = gc.getCentroid();
assertEquals(-77.0934256, centroid.getX(), 1e-5, "iter " + i);
assertEquals(38.8509022, centroid.getY(), 1e-5, "iter " + i);
}
}
}
@Test
void testReadShapefileLeniently(@TempDir Path dir) throws IOException, TransformException, FactoryException {
var shpPath = dir.resolve("test.shp");
@ -82,7 +119,7 @@ class ShapefileReaderTest {
featureStore.setTransaction(transaction);
var collection = new DefaultFeatureCollection();
var featureBuilder = new SimpleFeatureBuilder(type);
featureBuilder.add(TestUtils.newPoint(1, 2));
featureBuilder.add(newPoint(1, 2));
featureBuilder.add(3);
var feature = featureBuilder.buildFeature(null);
collection.add(feature);
@ -92,11 +129,11 @@ class ShapefileReaderTest {
try (var reader = new ShapefileReader(null, "test", shpPath)) {
assertEquals(1, reader.getFeatureCount());
List<SimpleFeature> features = new ArrayList<>();
List<SimpleFeature> features = new CopyOnWriteArrayList<>();
reader.readFeatures(features::add);
assertEquals(10.5113, features.get(0).latLonGeometry().getCentroid().getX(), 1e-4);
assertEquals(0, features.get(0).latLonGeometry().getCentroid().getY(), 1e-4);
assertEquals(3, features.get(0).getTag("value"));
assertEquals(10.5113, features.getFirst().latLonGeometry().getCentroid().getX(), 1e-4);
assertEquals(0, features.getFirst().latLonGeometry().getCentroid().getY(), 1e-4);
assertEquals(3, features.getFirst().getTag("value"));
}
}
@ -105,8 +142,8 @@ class ShapefileReaderTest {
for (int i = 1; i <= 2; i++) {
assertEquals(86, reader.getFeatureCount());
List<Geometry> points = new ArrayList<>();
List<String> names = new ArrayList<>();
List<Geometry> points = new CopyOnWriteArrayList<>();
List<String> names = new CopyOnWriteArrayList<>();
WorkerPipeline.start("test", Stats.inMemory())
.fromGenerator("source", reader::readFeatures)
.addBuffer("reader_queue", 100, 1)
@ -117,12 +154,13 @@ class ShapefileReaderTest {
points.add(elem.latLonGeometry());
names.add(elem.getTag("name").toString());
}).await();
assertEquals(numInEnv, points.stream().filter(point -> env.contains(point.getCoordinate())).count());
assertEquals(86, points.size());
assertTrue(names.contains("Van Dörn Street"));
var gc = GeoUtils.JTS_FACTORY.createGeometryCollection(points.toArray(new Geometry[0]));
var centroid = gc.getCentroid();
assertEquals(-77.0297995, centroid.getX(), 5, "iter " + i);
assertEquals(38.9119684, centroid.getY(), 5, "iter " + i);
assertEquals(-77.0297995, centroid.getX(), 1e-5, "iter " + i);
assertEquals(38.9119684, centroid.getY(), 1e-5, "iter " + i);
}
}
}

Wyświetl plik

@ -47,7 +47,7 @@ class FeatureRendererTest {
private FeatureCollector collector(Geometry worldGeom) {
var latLonGeom = GeoUtils.worldToLatLonCoords(worldGeom);
return new FeatureCollector.Factory(config, stats)
.get(SimpleFeature.create(latLonGeom, new HashMap<>(0), null, null,
.get(SimpleFeature.create(latLonGeom, HashMap.newHashMap(0), null, null,
1));
}

Wyświetl plik

@ -32,7 +32,7 @@ class WriteableCsvArchiveTest {
final Path csvFile = tempDir.resolve("out.csv");
try (var archive = WriteableCsvArchive.newWriteToFile(format, csvFile, defaultConfig)) {
archive.initialize(defaultMetadata); // ignored
archive.initialize();
try (var tileWriter = archive.newTileWriter()) {
tileWriter.write(new TileEncodingResult(TileCoord.ofXYZ(0, 0, 0), new byte[]{0}, OptionalLong.empty()));
tileWriter.write(new TileEncodingResult(TileCoord.ofXYZ(1, 2, 3), new byte[]{1}, OptionalLong.of(1)));
@ -67,7 +67,7 @@ class WriteableCsvArchiveTest {
try (
var archive = WriteableCsvArchive.newWriteToFile(TileArchiveConfig.Format.CSV, csvFilePrimary, defaultConfig)
) {
archive.initialize(defaultMetadata); // ignored
archive.initialize();
try (var tileWriter = archive.newTileWriter()) {
tileWriter.write(new TileEncodingResult(TileCoord.ofXYZ(11, 12, 1), new byte[]{0}, OptionalLong.empty()));
tileWriter.write(new TileEncodingResult(TileCoord.ofXYZ(21, 22, 2), new byte[]{1}, OptionalLong.empty()));
@ -159,7 +159,7 @@ class WriteableCsvArchiveTest {
final Path csvFile = tempDir.resolve("out.csv");
try (var archive = WriteableCsvArchive.newWriteToFile(TileArchiveConfig.Format.CSV, csvFile, config)) {
archive.initialize(defaultMetadata);
archive.initialize();
try (var tileWriter = archive.newTileWriter()) {
tileWriter.write(new TileEncodingResult(TileCoord.ofXYZ(0, 0, 0), new byte[]{0, 1}, OptionalLong.empty()));
tileWriter.write(new TileEncodingResult(TileCoord.ofXYZ(1, 1, 1), new byte[]{2, 3}, OptionalLong.empty()));

Wyświetl plik

@ -32,7 +32,7 @@ import org.locationtech.jts.geom.Envelope;
class WriteableJsonStreamArchiveTest {
private static final StreamArchiveConfig defaultConfig = new StreamArchiveConfig(false, Arguments.of());
private static final TileArchiveMetadata maxMetadataIn =
private static final TileArchiveMetadata MAX_METADATA_IN =
new TileArchiveMetadata("name", "description", "attribution", "version", "type", "format", new Envelope(0, 1, 2, 3),
new CoordinateXY(1.3, 3.7), 1.0, 2, 3,
List.of(
@ -46,7 +46,7 @@ class WriteableJsonStreamArchiveTest {
),
ImmutableMap.of("a", "b", "c", "d"),
TileCompression.GZIP);
private static final String maxMetadataOut = """
private static final String MAX_METADATA_OUT = """
{
"name":"name",
"description":"description",
@ -88,7 +88,7 @@ class WriteableJsonStreamArchiveTest {
"c":"d"
}""".lines().map(String::trim).collect(Collectors.joining(""));
private static final TileArchiveMetadata minMetadataIn =
private static final TileArchiveMetadata MIN_METADATA_IN =
new TileArchiveMetadata(null, null, null, null, null, null, null, null, null, null, null, null, null, null);
private static final String MIN_METADATA_OUT = "{}";
@ -98,21 +98,21 @@ class WriteableJsonStreamArchiveTest {
final Path csvFile = tempDir.resolve("out.json");
try (var archive = WriteableJsonStreamArchive.newWriteToFile(csvFile, defaultConfig)) {
archive.initialize(maxMetadataIn);
archive.initialize();
try (var tileWriter = archive.newTileWriter()) {
tileWriter.write(new TileEncodingResult(TileCoord.ofXYZ(0, 0, 0), new byte[]{0}, OptionalLong.empty()));
tileWriter.write(new TileEncodingResult(TileCoord.ofXYZ(1, 2, 3), new byte[]{1}, OptionalLong.of(1)));
}
archive.finish(minMetadataIn);
archive.finish(MIN_METADATA_IN);
}
assertEqualsDelimitedJson(
"""
{"type":"initialization","metadata":%s}
{"type":"initialization"}
{"type":"tile","x":0,"y":0,"z":0,"encodedData":"AA=="}
{"type":"tile","x":1,"y":2,"z":3,"encodedData":"AQ=="}
{"type":"finish","metadata":%s}
""".formatted(maxMetadataOut, MIN_METADATA_OUT),
""".formatted(MIN_METADATA_OUT),
Files.readString(csvFile)
);
@ -132,7 +132,7 @@ class WriteableJsonStreamArchiveTest {
final var tile3 = new TileEncodingResult(TileCoord.ofXYZ(41, 42, 4), new byte[]{3}, OptionalLong.empty());
final var tile4 = new TileEncodingResult(TileCoord.ofXYZ(51, 52, 5), new byte[]{4}, OptionalLong.empty());
try (var archive = WriteableJsonStreamArchive.newWriteToFile(csvFilePrimary, defaultConfig)) {
archive.initialize(minMetadataIn);
archive.initialize();
try (var tileWriter = archive.newTileWriter()) {
tileWriter.write(tile0);
tileWriter.write(tile1);
@ -144,16 +144,16 @@ class WriteableJsonStreamArchiveTest {
try (var tileWriter = archive.newTileWriter()) {
tileWriter.write(tile4);
}
archive.finish(maxMetadataIn);
archive.finish(MAX_METADATA_IN);
}
assertEqualsDelimitedJson(
"""
{"type":"initialization","metadata":%s}
{"type":"initialization"}
{"type":"tile","x":11,"y":12,"z":1,"encodedData":"AA=="}
{"type":"tile","x":21,"y":22,"z":2,"encodedData":"AQ=="}
{"type":"finish","metadata":%s}
""".formatted(MIN_METADATA_OUT, maxMetadataOut),
""".formatted(MAX_METADATA_OUT),
Files.readString(csvFilePrimary)
);
@ -199,11 +199,11 @@ class WriteableJsonStreamArchiveTest {
final String expectedJson =
"""
{"type":"initialization","metadata":%s}
{"type":"initialization"}
{"type":"tile","x":0,"y":0,"z":0,"encodedData":"AA=="}
{"type":"tile","x":1,"y":2,"z":3,"encodedData":"AQ=="}
{"type":"finish","metadata":%s}
""".formatted(MIN_METADATA_OUT, maxMetadataOut)
""".formatted(MAX_METADATA_OUT)
.replace('\n', ' ');
testTileOptions(tempDir, config, expectedJson);
@ -216,12 +216,12 @@ class WriteableJsonStreamArchiveTest {
final Path csvFile = tempDir.resolve("out.json");
try (var archive = WriteableJsonStreamArchive.newWriteToFile(csvFile, config)) {
archive.initialize(minMetadataIn);
archive.initialize();
try (var tileWriter = archive.newTileWriter()) {
tileWriter.write(new TileEncodingResult(TileCoord.ofXYZ(0, 0, 0), new byte[]{0}, OptionalLong.empty()));
tileWriter.write(new TileEncodingResult(TileCoord.ofXYZ(1, 2, 3), new byte[]{1}, OptionalLong.empty()));
}
archive.finish(maxMetadataIn);
archive.finish(MAX_METADATA_IN);
}
assertEqualsDelimitedJson(expectedJson, Files.readString(csvFile));

Wyświetl plik

@ -76,7 +76,7 @@ class WriteableProtoStreamArchiveTest {
final var tile0 = new TileEncodingResult(TileCoord.ofXYZ(0, 0, 0), new byte[]{0}, OptionalLong.empty());
final var tile1 = new TileEncodingResult(TileCoord.ofXYZ(1, 2, 3), new byte[]{1}, OptionalLong.of(1));
try (var archive = WriteableProtoStreamArchive.newWriteToFile(csvFile, defaultConfig)) {
archive.initialize(maxMetadataIn);
archive.initialize();
try (var tileWriter = archive.newTileWriter()) {
tileWriter.write(tile0);
tileWriter.write(tile1);
@ -86,7 +86,7 @@ class WriteableProtoStreamArchiveTest {
try (InputStream in = Files.newInputStream(csvFile)) {
assertEquals(
List.of(wrapInit(maxMetadataOut), toEntry(tile0), toEntry(tile1), wrapFinish(minMetadataOut)),
List.of(wrapInit(), toEntry(tile0), toEntry(tile1), wrapFinish(minMetadataOut)),
readAllEntries(in)
);
}
@ -105,7 +105,7 @@ class WriteableProtoStreamArchiveTest {
final var tile3 = new TileEncodingResult(TileCoord.ofXYZ(41, 42, 4), new byte[]{3}, OptionalLong.empty());
final var tile4 = new TileEncodingResult(TileCoord.ofXYZ(51, 52, 5), new byte[]{4}, OptionalLong.empty());
try (var archive = WriteableProtoStreamArchive.newWriteToFile(csvFilePrimary, defaultConfig)) {
archive.initialize(minMetadataIn);
archive.initialize();
try (var tileWriter = archive.newTileWriter()) {
tileWriter.write(tile0);
tileWriter.write(tile1);
@ -122,7 +122,7 @@ class WriteableProtoStreamArchiveTest {
try (InputStream in = Files.newInputStream(csvFilePrimary)) {
assertEquals(
List.of(wrapInit(minMetadataOut), toEntry(tile0), toEntry(tile1), wrapFinish(maxMetadataOut)),
List.of(wrapInit(), toEntry(tile0), toEntry(tile1), wrapFinish(maxMetadataOut)),
readAllEntries(in)
);
}
@ -167,10 +167,8 @@ class WriteableProtoStreamArchiveTest {
.build();
}
private static StreamArchiveProto.Entry wrapInit(StreamArchiveProto.Metadata metadata) {
return StreamArchiveProto.Entry.newBuilder()
.setInitialization(StreamArchiveProto.InitializationEntry.newBuilder().setMetadata(metadata).build())
.build();
private static StreamArchiveProto.Entry wrapInit() {
return StreamArchiveProto.Entry.newBuilder().build();
}
private static StreamArchiveProto.Entry wrapFinish(StreamArchiveProto.Metadata metadata) {

Wyświetl plik

@ -3,18 +3,16 @@ package com.onthegomap.planetiler.util;
import static org.junit.jupiter.api.Assertions.*;
import com.onthegomap.planetiler.config.PlanetilerConfig;
import com.onthegomap.planetiler.stats.Stats;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicLong;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import org.junit.jupiter.params.ParameterizedTest;
@ -25,26 +23,25 @@ class DownloaderTest {
@TempDir
Path path;
private final PlanetilerConfig config = PlanetilerConfig.defaults();
private final Stats stats = Stats.inMemory();
private long downloads = 0;
private AtomicLong downloads = new AtomicLong(0);
private Downloader mockDownloader(Map<String, byte[]> resources, boolean supportsRange, int maxLength) {
return new Downloader(config, stats, 2L) {
private Downloader mockDownloader(Map<String, byte[]> resources, boolean supportsRange) {
return new Downloader(config, 2L) {
@Override
InputStream openStream(String url) {
downloads++;
downloads.incrementAndGet();
assertTrue(resources.containsKey(url), "no resource for " + url);
byte[] bytes = resources.get(url);
return new ByteArrayInputStream(maxLength < bytes.length ? Arrays.copyOf(bytes, maxLength) : bytes);
return new ByteArrayInputStream(bytes);
}
@Override
InputStream openStreamRange(String url, long start, long end) {
assertTrue(supportsRange, "does not support range");
downloads++;
downloads.incrementAndGet();
assertTrue(resources.containsKey(url), "no resource for " + url);
byte[] result = new byte[Math.min(maxLength, (int) (end - start))];
byte[] result = new byte[(int) (end - start)];
byte[] bytes = resources.get(url);
for (int i = (int) start; i < start + result.length; i++) {
result[(int) (i - start)] = bytes[i];
@ -53,31 +50,28 @@ class DownloaderTest {
}
@Override
CompletableFuture<ResourceMetadata> httpHead(String url) {
ResourceMetadata httpHead(String url) {
String[] parts = url.split("#");
if (parts.length > 1) {
int redirectNum = Integer.parseInt(parts[1]);
String next = redirectNum <= 1 ? parts[0] : (parts[0] + "#" + (redirectNum - 1));
return CompletableFuture.supplyAsync(
() -> new ResourceMetadata(Optional.of(next), url, 0, supportsRange));
return new ResourceMetadata(Optional.of(next), url, 0, supportsRange);
}
byte[] bytes = resources.get(url);
return CompletableFuture.supplyAsync(
() -> new ResourceMetadata(Optional.empty(), url, bytes.length, supportsRange));
return new ResourceMetadata(Optional.empty(), url, bytes.length, supportsRange);
}
};
}
@ParameterizedTest
@CsvSource({
"false,100,0",
"true,100,0",
"true,2,0",
"false,100,1",
"false,100,2",
"true,2,4",
"false,0",
"true,0",
"false,1",
"false,2",
"true,4",
})
void testDownload(boolean range, int maxLength, int redirects) throws Exception {
void testDownload(boolean range, int redirects) throws Exception {
Path dest = path.resolve("out");
String string = "0123456789";
String url = "http://url";
@ -85,7 +79,7 @@ class DownloaderTest {
Map<String, byte[]> resources = new ConcurrentHashMap<>();
byte[] bytes = string.getBytes(StandardCharsets.UTF_8);
Downloader downloader = mockDownloader(resources, range, maxLength);
Downloader downloader = mockDownloader(resources, range);
// fails if no data
var resource1 = new Downloader.ResourceToDownload("resource", initialUrl, dest);
@ -102,10 +96,10 @@ class DownloaderTest {
assertEquals(10, resource2.bytesDownloaded());
// does not re-request if size is the same
downloads = 0;
downloads.set(0);
var resource3 = new Downloader.ResourceToDownload("resource", initialUrl, dest);
downloader.downloadIfNecessary(resource3).get();
assertEquals(0, downloads);
assertEquals(0, downloads.get());
assertEquals(string, Files.readString(dest));
assertEquals(FileUtils.size(path), FileUtils.size(dest));
assertEquals(0, resource3.bytesDownloaded());
@ -115,7 +109,7 @@ class DownloaderTest {
String newContent = "54321";
resources.put(url, newContent.getBytes(StandardCharsets.UTF_8));
downloader.downloadIfNecessary(resource4).get();
assertTrue(downloads > 0, "downloads were " + downloads);
assertTrue(downloads.get() > 0, "downloads were " + downloads);
assertEquals(newContent, Files.readString(dest));
assertEquals(FileUtils.size(path), FileUtils.size(dest));
assertEquals(5, resource4.bytesDownloaded());
@ -123,7 +117,7 @@ class DownloaderTest {
@Test
void testDownloadFailsIfTooBig() {
var downloader = new Downloader(config, stats, 2L) {
var downloader = new Downloader(config, 2L) {
@Override
InputStream openStream(String url) {
@ -136,8 +130,8 @@ class DownloaderTest {
}
@Override
CompletableFuture<ResourceMetadata> httpHead(String url) {
return CompletableFuture.completedFuture(new ResourceMetadata(Optional.empty(), url, Long.MAX_VALUE, true));
ResourceMetadata httpHead(String url) {
return new ResourceMetadata(Optional.empty(), url, Long.MAX_VALUE, true);
}
};

Wyświetl plik

@ -152,4 +152,11 @@ class FileUtilsTest {
List.of("/shapefile/stations.shp", "/shapefile/stations.shx"),
matchingPaths.stream().map(Path::toString).sorted().toList());
}
@Test
void testExpandFile() throws IOException {
Path path = tmpDir.resolve("toExpand");
FileUtils.setLength(path, 1000);
assertEquals(1000, Files.size(path));
}
}

Wyświetl plik

@ -2,13 +2,8 @@ package com.onthegomap.planetiler.util;
import static org.junit.jupiter.api.Assertions.assertEquals;
import com.onthegomap.planetiler.VectorTile;
import com.onthegomap.planetiler.geo.GeoUtils;
import com.onthegomap.planetiler.geo.TileCoord;
import com.onthegomap.planetiler.render.RenderedFeature;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import org.junit.jupiter.api.Test;
class LayerAttrStatsTest {
@ -17,109 +12,50 @@ class LayerAttrStatsTest {
@Test
void testEmptyLayerStats() {
assertEquals(Arrays.asList(new LayerAttrStats.VectorLayer[]{}), layerStats.getTileStats());
assertEquals(List.of(), layerStats.getTileStats());
}
@Test
void testEmptyLayerStatsOneLayer() {
layerStats.accept(new RenderedFeature(
TileCoord.ofXYZ(1, 2, 3),
new VectorTile.Feature(
"layer1",
1,
VectorTile.encodeGeometry(GeoUtils.point(1, 2)),
Map.of("a", 1, "b", "string", "c", true)
),
1,
Optional.empty()
));
assertEquals(Arrays.asList(new LayerAttrStats.VectorLayer[]{
new LayerAttrStats.VectorLayer("layer1", Map.of(
"a", LayerAttrStats.FieldType.NUMBER,
"b", LayerAttrStats.FieldType.STRING,
"c", LayerAttrStats.FieldType.BOOLEAN
), 3, 3)
}), layerStats.getTileStats());
layerStats.accept("layer1", 3, "a", 1);
layerStats.accept("layer1", 3, "b", "string");
layerStats.accept("layer1", 3, "c", true);
assertEquals(List.of(new LayerAttrStats.VectorLayer("layer1", Map.of(
"a", LayerAttrStats.FieldType.NUMBER,
"b", LayerAttrStats.FieldType.STRING,
"c", LayerAttrStats.FieldType.BOOLEAN
), 3, 3)), layerStats.getTileStats());
}
@Test
void testEmptyLayerStatsTwoLayers() {
layerStats.accept(new RenderedFeature(
TileCoord.ofXYZ(1, 2, 3),
new VectorTile.Feature(
"layer1",
1,
VectorTile.encodeGeometry(GeoUtils.point(1, 2)),
Map.of()
),
1,
Optional.empty()
));
layerStats.accept(new RenderedFeature(
TileCoord.ofXYZ(1, 2, 4),
new VectorTile.Feature(
"layer2",
1,
VectorTile.encodeGeometry(GeoUtils.point(1, 2)),
Map.of("a", 1, "b", true, "c", true)
),
1,
Optional.empty()
));
layerStats.accept(new RenderedFeature(
TileCoord.ofXYZ(1, 2, 1),
new VectorTile.Feature(
"layer2",
1,
VectorTile.encodeGeometry(GeoUtils.point(1, 2)),
Map.of("a", 1, "b", true, "c", 1)
),
1,
Optional.empty()
));
assertEquals(Arrays.asList(new LayerAttrStats.VectorLayer[]{
new LayerAttrStats.VectorLayer("layer1", Map.of(
), 3, 3),
layerStats.handlerForThread().forZoom(3).forLayer("layer1");
layerStats.accept("layer2", 4, "a", 1);
layerStats.accept("layer2", 4, "b", true);
layerStats.accept("layer2", 4, "c", true);
layerStats.accept("layer2", 1, "a", 1);
layerStats.accept("layer2", 1, "b", true);
layerStats.accept("layer2", 1, "c", 1);
assertEquals(List.of(new LayerAttrStats.VectorLayer("layer1", Map.of(
), 3, 3),
new LayerAttrStats.VectorLayer("layer2", Map.of(
"a", LayerAttrStats.FieldType.NUMBER,
"b", LayerAttrStats.FieldType.BOOLEAN,
"c", LayerAttrStats.FieldType.STRING
), 1, 4)
}), layerStats.getTileStats());
), 1, 4)), layerStats.getTileStats());
}
@Test
void testMergeFromMultipleThreads() throws InterruptedException {
Thread t1 = new Thread(() -> layerStats.accept(new RenderedFeature(
TileCoord.ofXYZ(1, 2, 3),
new VectorTile.Feature(
"layer1",
1,
VectorTile.encodeGeometry(GeoUtils.point(1, 2)),
Map.of("a", 1)
),
1,
Optional.empty()
)));
layerStats.accept("layer1", 3, "a", true);
Thread t1 = new Thread(() -> layerStats.accept("layer1", 3, "a", 1));
t1.start();
Thread t2 = new Thread(() -> layerStats.accept(new RenderedFeature(
TileCoord.ofXYZ(1, 2, 4),
new VectorTile.Feature(
"layer1",
1,
VectorTile.encodeGeometry(GeoUtils.point(1, 2)),
Map.of("a", true)
),
1,
Optional.empty()
)));
Thread t2 = new Thread(() -> layerStats.accept("layer1", 4, "a", true));
t2.start();
t1.join();
t2.join();
assertEquals(Arrays.asList(new LayerAttrStats.VectorLayer[]{
new LayerAttrStats.VectorLayer("layer1", Map.of(
"a", LayerAttrStats.FieldType.STRING
), 3, 4)
}), layerStats.getTileStats());
assertEquals(List.of(new LayerAttrStats.VectorLayer("layer1", Map.of(
"a", LayerAttrStats.FieldType.STRING
), 3, 4)), layerStats.getTileStats());
}
}

Wyświetl plik

@ -0,0 +1,20 @@
package com.onthegomap.planetiler.util;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import org.junit.jupiter.api.Test;
class LogUtilTest {
@Test
void testStageHandling() {
LogUtil.clearStage();
assertNull(LogUtil.getStage());
LogUtil.setStage("test");
assertEquals("test", LogUtil.getStage());
LogUtil.setStage(LogUtil.getStage(), "child");
assertEquals("test:child", LogUtil.getStage());
LogUtil.clearStage();
assertNull(LogUtil.getStage());
}
}

Plik binarny nie jest wyświetlany.

Plik binarny nie jest wyświetlany.

Plik binarny nie jest wyświetlany.

Plik binarny nie jest wyświetlany.

Plik binarny nie jest wyświetlany.

Plik binarny nie jest wyświetlany.

Plik binarny nie jest wyświetlany.

Plik binarny nie jest wyświetlany.

Plik binarny nie jest wyświetlany.

Wyświetl plik

@ -2,7 +2,7 @@ appenders=console
appender.console.type=Console
appender.console.name=STDOUT
appender.console.layout.type=PatternLayout
appender.console.layout.pattern=$${uptime:now} %level{length=3} %notEmpty{[%X{stage}] }- %msg%n%throwable
appender.console.layout.pattern=$${uptime:now} %level{length=3} %X{stage}- %msg%n%throwable
packages=com.onthegomap.planetiler.util.log4j
rootLogger.level=warn
rootLogger.appenderRefs=stdout

Wyświetl plik

@ -292,15 +292,18 @@ Specific tile post processing operations for merging features may be defined:
- `merge_line_strings` - Combines linestrings with the same set of attributes into a multilinestring where segments with
touching endpoints are merged.
- `merge_polygons` - Combines polygons with the same set of attributes into a multipolygon where overlapping/touching polygons
- `merge_polygons` - Combines polygons with the same set of attributes into a multipolygon where overlapping/touching
polygons
are combined into fewer polygons covering the same area.
The follow attributes for `merge_line_strings` may be set:
- `min_length` - Minimum tile pixel length of features to emit, or 0 to emit all merged linestrings.
- `tolerance` - After merging, simplify linestrings using this pixel tolerance, or -1 to skip simplification step.
- `buffer` - Number of pixels outside the visible tile area to include detail for, or -1 to skip clipping step.
The follow attribute for `merge_polygons` may be set:
- `min_area` - Minimum area in square tile pixels of polygons to emit.
For example:
@ -482,6 +485,11 @@ nested, so each child context can also access the variables from its parent.
>> - `feature.id` - numeric ID of the input feature
>> - `feature.source` - string source ID this feature came from
>> - `feature.source_layer` - optional layer within the source the feature came from
>> - `feature.osm_changeset` - optional OSM changeset ID for this feature
>> - `feature.osm_version` - optional OSM element version for this feature
>> - `feature.osm_timestamp` - optional OSM last modified timestamp for this feature
>> - `feature.osm_user_id` - optional ID of the OSM user that last modified this feature
>> - `feature.osm_user_name` - optional name of the OSM user that last modified this feature
>>
>>> ##### post-match context
>>>
@ -534,7 +542,7 @@ in [PlanetilerStdLib](src/main/java/com/onthegomap/planetiler/custommap/expressi
- `<string>.replace(from, to, limit)` returns the input string with the first N occurrences of from replaced by to
- `<string>.replaceRegex(pattern, value)` replaces every occurrence of regular expression with value from the string
it was called on using java's
built-in [replaceAll](<https://docs.oracle.com/en/java/javase/17/docs/api/java.base/java/util/regex/Matcher.html#replaceAll(java.lang.String)>)
built-in [replaceAll](<https://docs.oracle.com/en/java/javase/21/docs/api/java.base/java/util/regex/Matcher.html#replaceAll(java.lang.String)>)
behavior
- `<string>.split(separator)` returns a list of strings split from the input by a separator
- `<string>.split(separator, limit)` splits the list into up to N parts

Wyświetl plik

@ -45,7 +45,7 @@
<dependency>
<groupId>org.projectnessie.cel</groupId>
<artifactId>cel-bom</artifactId>
<version>0.3.21</version>
<version>0.4.3</version>
<type>pom</type>
<scope>import</scope>
</dependency>

Wyświetl plik

@ -86,7 +86,7 @@ public class ConfigExpressionParser<I extends ScriptContext> {
return cast(signature(output), child, dataType);
} else {
var keys = map.keySet();
if (keys.equals(Set.of("coalesce")) && map.get("coalesce")instanceof Collection<?> cases) {
if (keys.equals(Set.of("coalesce")) && map.get("coalesce") instanceof Collection<?> cases) {
return coalesce(cases.stream().map(item -> parse(item, output)).toList());
} else if (keys.equals(Set.of("match"))) {
return parseMatch(map.get("match"), true, output);

Wyświetl plik

@ -13,6 +13,8 @@ import com.onthegomap.planetiler.expression.DataType;
import com.onthegomap.planetiler.reader.SourceFeature;
import com.onthegomap.planetiler.reader.WithGeometryType;
import com.onthegomap.planetiler.reader.WithTags;
import com.onthegomap.planetiler.reader.osm.OsmElement;
import com.onthegomap.planetiler.reader.osm.OsmSourceFeature;
import com.onthegomap.planetiler.util.Try;
import java.util.HashMap;
import java.util.LinkedHashMap;
@ -340,6 +342,11 @@ public class Contexts {
private static final String FEATURE_ID = "feature.id";
private static final String FEATURE_SOURCE = "feature.source";
private static final String FEATURE_SOURCE_LAYER = "feature.source_layer";
private static final String FEATURE_OSM_CHANGESET = "feature.osm_changeset";
private static final String FEATURE_OSM_VERSION = "feature.osm_version";
private static final String FEATURE_OSM_TIMESTAMP = "feature.osm_timestamp";
private static final String FEATURE_OSM_USER_ID = "feature.osm_user_id";
private static final String FEATURE_OSM_USER_NAME = "feature.osm_user_name";
public static ScriptEnvironment<ProcessFeature> description(Root root) {
return root.description()
@ -348,7 +355,12 @@ public class Contexts {
Decls.newVar(FEATURE_TAGS, Decls.newMapType(Decls.String, Decls.Any)),
Decls.newVar(FEATURE_ID, Decls.Int),
Decls.newVar(FEATURE_SOURCE, Decls.String),
Decls.newVar(FEATURE_SOURCE_LAYER, Decls.String)
Decls.newVar(FEATURE_SOURCE_LAYER, Decls.String),
Decls.newVar(FEATURE_OSM_CHANGESET, Decls.Int),
Decls.newVar(FEATURE_OSM_VERSION, Decls.Int),
Decls.newVar(FEATURE_OSM_TIMESTAMP, Decls.Int),
Decls.newVar(FEATURE_OSM_USER_ID, Decls.Int),
Decls.newVar(FEATURE_OSM_USER_NAME, Decls.String)
);
}
@ -360,7 +372,17 @@ public class Contexts {
case FEATURE_ID -> feature.id();
case FEATURE_SOURCE -> feature.getSource();
case FEATURE_SOURCE_LAYER -> wrapNullable(feature.getSourceLayer());
default -> null;
default -> {
OsmElement.Info info = feature instanceof OsmSourceFeature osm ? osm.originalElement().info() : null;
yield info == null ? null : switch (key) {
case FEATURE_OSM_CHANGESET -> info.changeset();
case FEATURE_OSM_VERSION -> info.version();
case FEATURE_OSM_TIMESTAMP -> info.timestamp();
case FEATURE_OSM_USER_ID -> info.userId();
case FEATURE_OSM_USER_NAME -> wrapNullable(info.user());
default -> null;
};
}
};
} else {
return null;
@ -410,7 +432,7 @@ public class Contexts {
}
public String matchKey() {
return matchKeys().isEmpty() ? null : matchKeys().get(0);
return matchKeys().isEmpty() ? null : matchKeys().getFirst();
}
public Object matchValue() {

Wyświetl plik

@ -65,7 +65,7 @@ public class TypeConversion {
return d % 1 == 0 ? Long.toString(d.longValue()) : d.toString();
}
private record Converter<I, O> (Class<I> in, Class<O> out, Function<I, O> fn) implements Function<Object, O> {
private record Converter<I, O>(Class<I> in, Class<O> out, Function<I, O> fn) implements Function<Object, O> {
@Override
public O apply(Object in) {
@SuppressWarnings("unchecked") I converted = (I) in;

Wyświetl plik

@ -15,7 +15,7 @@ import java.util.Objects;
*
* @param <T> Type of the expression context
*/
public record BooleanExpressionScript<T extends ScriptContext> (
public record BooleanExpressionScript<T extends ScriptContext>(
String expressionText,
ConfigExpressionScript<T, Boolean> expression,
Class<T> inputClass

Wyświetl plik

@ -70,7 +70,7 @@ public interface ConfigExpression<I extends ScriptContext, O>
}
/** An expression that always returns {@code value}. */
record Const<I extends ScriptContext, O> (O value) implements ConfigExpression<I, O> {
record Const<I extends ScriptContext, O>(O value) implements ConfigExpression<I, O> {
@Override
public O apply(I i) {
@ -79,7 +79,7 @@ public interface ConfigExpression<I extends ScriptContext, O>
}
/** An expression that returns the value associated with the first matching boolean expression. */
record Match<I extends ScriptContext, O> (
record Match<I extends ScriptContext, O>(
Signature<I, O> signature,
MultiExpression<ConfigExpression<I, O>> multiExpression,
ConfigExpression<I, O> fallback,
@ -146,7 +146,7 @@ public interface ConfigExpression<I extends ScriptContext, O>
}
/** An expression that returns the first non-null result of evaluating each child expression. */
record Coalesce<I extends ScriptContext, O> (List<? extends ConfigExpression<I, O>> children)
record Coalesce<I extends ScriptContext, O>(List<? extends ConfigExpression<I, O>> children)
implements ConfigExpression<I, O> {
@Override
@ -164,7 +164,7 @@ public interface ConfigExpression<I extends ScriptContext, O>
public ConfigExpression<I, O> simplifyOnce() {
return switch (children.size()) {
case 0 -> constOf(null);
case 1 -> children.get(0);
case 1 -> children.getFirst();
default -> {
var result = children.stream()
.flatMap(
@ -184,7 +184,7 @@ public interface ConfigExpression<I extends ScriptContext, O>
}
/** An expression that returns the value associated a given variable name at runtime. */
record Variable<I extends ScriptContext, O> (
record Variable<I extends ScriptContext, O>(
Signature<I, O> signature,
String name
) implements ConfigExpression<I, O> {
@ -202,7 +202,7 @@ public interface ConfigExpression<I extends ScriptContext, O>
}
/** An expression that returns the value associated a given tag of the input feature at runtime. */
record GetTag<I extends ScriptContext, O> (
record GetTag<I extends ScriptContext, O>(
Signature<I, O> signature,
ConfigExpression<I, String> tag
) implements ConfigExpression<I, O> {
@ -219,7 +219,7 @@ public interface ConfigExpression<I extends ScriptContext, O>
}
/** An expression that returns the value associated a given argument at runtime. */
record GetArg<I extends ScriptContext, O> (
record GetArg<I extends ScriptContext, O>(
Signature<I, O> signature,
ConfigExpression<I, String> arg
) implements ConfigExpression<I, O> {
@ -242,7 +242,7 @@ public interface ConfigExpression<I extends ScriptContext, O>
}
/** An expression that converts the input to a desired output {@link DataType} at runtime. */
record Cast<I extends ScriptContext, O> (
record Cast<I extends ScriptContext, O>(
Signature<I, O> signature,
ConfigExpression<I, ?> input,
DataType output
@ -268,7 +268,7 @@ public interface ConfigExpression<I extends ScriptContext, O>
}
}
record Signature<I extends ScriptContext, O> (ScriptEnvironment<I> in, Class<O> out) {
record Signature<I extends ScriptContext, O>(ScriptEnvironment<I> in, Class<O> out) {
public <O2> Signature<I, O2> withOutput(Class<O2> newOut) {
return new Signature<>(in, newOut);

Wyświetl plik

@ -12,7 +12,7 @@ import java.util.stream.Stream;
* @param clazz Class of the input context type
* @param <T> The runtime expression context type
*/
public record ScriptEnvironment<T extends ScriptContext> (List<Decl> declarations, Class<T> clazz, Contexts.Root root) {
public record ScriptEnvironment<T extends ScriptContext>(List<Decl> declarations, Class<T> clazz, Contexts.Root root) {
private static <T> List<T> concat(List<T> a, List<T> b) {
return Stream.concat(a.stream(), b.stream()).toList();
}

Wyświetl plik

@ -29,9 +29,9 @@ import java.util.Set;
import java.util.TreeSet;
import java.util.stream.Stream;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.geotools.geometry.jts.WKTReader2;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.io.ParseException;
import org.locationtech.jts.io.WKTReader;
import org.snakeyaml.engine.v2.exceptions.YamlEngineException;
/** Verifies that a profile maps input elements map to expected output vector tile features. */
@ -164,7 +164,7 @@ public class SchemaValidator {
default -> geometry;
};
try {
return new WKTReader2().read(wkt);
return new WKTReader().read(wkt);
} catch (ParseException e) {
throw new IllegalArgumentException("""
Bad geometry: "%s", must be "point" "line" "polygon" or a valid WKT string.

Wyświetl plik

@ -22,6 +22,7 @@ import com.onthegomap.planetiler.geo.GeoUtils;
import com.onthegomap.planetiler.geo.GeometryException;
import com.onthegomap.planetiler.reader.SimpleFeature;
import com.onthegomap.planetiler.reader.SourceFeature;
import com.onthegomap.planetiler.reader.osm.OsmElement;
import com.onthegomap.planetiler.stats.Stats;
import java.nio.file.Path;
import java.util.List;
@ -40,6 +41,7 @@ class ConfiguredFeatureTest {
private static final Function<String, Path> TEST_RESOURCE = TestConfigurableUtils::pathToTestResource;
private static final Function<String, Path> SAMPLE_RESOURCE = TestConfigurableUtils::pathToSample;
private static final Function<String, Path> TEST_INVALID_RESOURCE = TestConfigurableUtils::pathToTestInvalidResource;
private static final OsmElement.Info OSM_INFO = new OsmElement.Info(2, 3, 4, 5, "user");
private static final Map<String, Object> waterTags = Map.of(
"natural", "water",
@ -130,14 +132,15 @@ class ConfiguredFeatureTest {
private void testPolygon(String config, Map<String, Object> tags,
Consumer<Feature> test, int expectedMatchCount) {
var sf =
SimpleFeature.createFakeOsmFeature(newPolygon(0, 0, 1, 0, 1, 1, 0, 0), tags, "osm", null, 1, emptyList());
SimpleFeature.createFakeOsmFeature(newPolygon(0, 0, 1, 0, 1, 1, 0, 0), tags, "osm", null, 1, emptyList(),
OSM_INFO);
testFeature(config, sf, test, expectedMatchCount);
}
private void testPoint(String config, Map<String, Object> tags,
Consumer<Feature> test, int expectedMatchCount) {
var sf =
SimpleFeature.createFakeOsmFeature(newPoint(0, 0), tags, "osm", null, 1, emptyList());
SimpleFeature.createFakeOsmFeature(newPoint(0, 0), tags, "osm", null, 1, emptyList(), OSM_INFO);
testFeature(config, sf, test, expectedMatchCount);
}
@ -145,21 +148,22 @@ class ConfiguredFeatureTest {
private void testLinestring(String config,
Map<String, Object> tags, Consumer<Feature> test, int expectedMatchCount) {
var sf =
SimpleFeature.createFakeOsmFeature(newLineString(0, 0, 1, 0, 1, 1), tags, "osm", null, 1, emptyList());
SimpleFeature.createFakeOsmFeature(newLineString(0, 0, 1, 0, 1, 1), tags, "osm", null, 1, emptyList(), OSM_INFO);
testFeature(config, sf, test, expectedMatchCount);
}
private void testPolygon(Function<String, Path> pathFunction, String schemaFilename, Map<String, Object> tags,
Consumer<Feature> test, int expectedMatchCount) {
var sf =
SimpleFeature.createFakeOsmFeature(newPolygon(0, 0, 1, 0, 1, 1, 0, 0), tags, "osm", null, 1, emptyList());
SimpleFeature.createFakeOsmFeature(newPolygon(0, 0, 1, 0, 1, 1, 0, 0), tags, "osm", null, 1, emptyList(),
OSM_INFO);
testFeature(pathFunction, schemaFilename, sf, test, expectedMatchCount);
}
private void testLinestring(Function<String, Path> pathFunction, String schemaFilename,
Map<String, Object> tags, Consumer<Feature> test, int expectedMatchCount) {
var sf =
SimpleFeature.createFakeOsmFeature(newLineString(0, 0, 1, 0, 1, 1), tags, "osm", null, 1, emptyList());
SimpleFeature.createFakeOsmFeature(newLineString(0, 0, 1, 0, 1, 1), tags, "osm", null, 1, emptyList(), OSM_INFO);
testFeature(pathFunction, schemaFilename, sf, test, expectedMatchCount);
}
@ -547,6 +551,11 @@ class ConfiguredFeatureTest {
"\\\\${feature.id}|\\${feature.id}",
"${feature.source}|osm",
"${feature.source_layer}|null",
"${feature.osm_changeset}|2",
"${feature.osm_timestamp}|3",
"${feature.osm_user_id}|4",
"${feature.osm_version}|5",
"${feature.osm_user_name}|user",
"${coalesce(feature.source_layer, 'missing')}|missing",
"{match: {test: {natural: water}}}|test",
"{match: {test: {natural: not_water}}}|null",

Wyświetl plik

@ -14,6 +14,12 @@
<properties>
<mainClass>com.onthegomap.planetiler.Main</mainClass>
<!--
compile against an earlier version so that Main runs but can emit a more readable
error when running on an older version of Java
-->
<maven.compiler.source>17</maven.compiler.source>
<maven.compiler.target>17</maven.compiler.target>
<image.version>${project.version}</image.version>
<image>ghcr.io/onthegomap/planetiler:${image.version}</image>
<assembly-phase>package</assembly-phase>
@ -54,7 +60,7 @@
<skip>false</skip>
<from>
<image>
eclipse-temurin:17-jre
eclipse-temurin:21-jre
</image>
<platforms>
<platform>
@ -73,6 +79,8 @@
</org.opencontainers.image.source>
</labels>
<mainClass>${mainClass}</mainClass>
<creationTime>${maven.build.timestamp}</creationTime>
<filesModificationTime>${maven.build.timestamp}</filesModificationTime>
</container>
</configuration>
</plugin>

Wyświetl plik

@ -26,6 +26,15 @@ import org.openmaptiles.util.VerifyMonaco;
* public static void main(String[] args)} methods of runnable classes.
*/
public class Main {
static {
int version = Runtime.version().feature();
if (version < 21) {
System.err.println(
"You are using Java " + version +
" but Planetiler requires 21 or later, for more details on upgrading see: https://github.com/onthegomap/planetiler/blob/main/CONTRIBUTING.md");
System.exit(1);
}
}
private static final EntryPoint DEFAULT_TASK = OpenMapTilesMain::main;
private static final Map<String, EntryPoint> ENTRY_POINTS = Map.ofEntries(

Wyświetl plik

@ -4,7 +4,7 @@ This is a minimal example project that shows how to create custom maps with Plan
Requirements:
- Java 17+ (see [CONTIRBUTING.md](../CONTRIBUTING.md))
- Java 21+ (see [CONTIRBUTING.md](../CONTRIBUTING.md))
- on mac: `brew install --cask temurin`
- [Maven](https://maven.apache.org/install.html)
- on mac: `brew install maven`

Wyświetl plik

@ -64,11 +64,11 @@ public class BikeRouteOverlay implements Profile {
relation.getString("route"),
// except map network abbreviation to a human-readable value
switch (relation.getString("network", "")) {
case "icn" -> "international";
case "ncn" -> "national";
case "rcn" -> "regional";
case "lcn" -> "local";
default -> "other";
case "icn" -> "international";
case "ncn" -> "national";
case "rcn" -> "regional";
case "lcn" -> "local";
default -> "other";
}
));
}

Wyświetl plik

@ -76,10 +76,12 @@ public class OsmQaTiles implements Profile {
}
feature
.setAttr("@id", sourceFeature.id())
.setAttr("@type", element instanceof OsmElement.Node ? "node" :
element instanceof OsmElement.Way ? "way" :
element instanceof OsmElement.Relation ? "relation" : null
);
.setAttr("@type", switch (element) {
case OsmElement.Node ignored -> "node";
case OsmElement.Way ignored -> "way";
case OsmElement.Relation ignored -> "relation";
default -> null;
});
var info = element.info();
if (info != null) {
feature

Wyświetl plik

@ -9,10 +9,10 @@
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>17</maven.compiler.source>
<maven.compiler.target>17</maven.compiler.target>
<maven.compiler.source>21</maven.compiler.source>
<maven.compiler.target>21</maven.compiler.target>
<planetiler.version>0.7-SNAPSHOT</planetiler.version>
<junit.version>5.10.0</junit.version>
<junit.version>5.10.1</junit.version>
<!-- Replace this with the main class for the profile you add -->
<mainClass>com.onthegomap.planetiler.examples.BikeRouteOverlay</mainClass>
</properties>
@ -78,12 +78,12 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>3.1.2</version>
<version>3.2.3</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
<version>3.1.2</version>
<version>3.2.3</version>
</plugin>
<!-- Create an executable jar from "mvn package" goal -->

Some files were not shown because too many files have changed in this diff Show More