Porównaj commity

...

7 Commity

Autor SHA1 Wiadomość Data
Björn Bilger 5a74de9208
Merge be4994a44a into 6f6a4dd6ed 2024-04-13 08:24:59 +10:00
dependabot[bot] 6f6a4dd6ed
Bump org.apache.maven.plugins:maven-gpg-plugin from 3.2.2 to 3.2.3 (#867)
Bumps [org.apache.maven.plugins:maven-gpg-plugin](https://github.com/apache/maven-gpg-plugin) from 3.2.2 to 3.2.3.
- [Release notes](https://github.com/apache/maven-gpg-plugin/releases)
- [Commits](https://github.com/apache/maven-gpg-plugin/compare/maven-gpg-plugin-3.2.2...maven-gpg-plugin-3.2.3)

---
updated-dependencies:
- dependency-name: org.apache.maven.plugins:maven-gpg-plugin
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-04-11 06:11:07 -04:00
dependabot[bot] 6b9c996df9
Bump org.apache.maven.plugins:maven-source-plugin from 3.3.0 to 3.3.1 (#865)
Bumps [org.apache.maven.plugins:maven-source-plugin](https://github.com/apache/maven-source-plugin) from 3.3.0 to 3.3.1.
- [Commits](https://github.com/apache/maven-source-plugin/compare/maven-source-plugin-3.3.0...maven-source-plugin-3.3.1)

---
updated-dependencies:
- dependency-name: org.apache.maven.plugins:maven-source-plugin
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-04-10 07:13:03 -04:00
Michael Barry 978c64f0c6
Add polygon index intersection utility (#866) 2024-04-10 07:12:29 -04:00
bbilger be4994a44a refactoring 2024-01-23 19:40:53 +01:00
bbilger 19c834fb5d properly close the getAllTiles iterator 2024-01-23 19:19:25 +01:00
bbilger 264b3515c4 add tile-copy utility
to copy tiles from one archive into another
e.g. mbtiles to files, pmtiles to mbtiles, ...
2024-01-23 19:19:23 +01:00
51 zmienionych plików z 2015 dodań i 408 usunięć

Wyświetl plik

@ -41,7 +41,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.function.Function;
import java.util.stream.IntStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -659,18 +658,8 @@ public class Planetiler {
System.exit(0);
} else if (onlyDownloadSources) {
// don't check files if not generating map
} else if (config.append()) {
if (!output.format().supportsAppend()) {
throw new IllegalArgumentException("cannot append to " + output.format().id());
}
if (!output.exists()) {
throw new IllegalArgumentException(output.uri() + " must exist when appending");
}
} else if (overwrite || config.force()) {
output.delete();
} else if (output.exists()) {
throw new IllegalArgumentException(
output.uri() + " already exists, use the --force argument to overwrite or --append.");
} else {
output.setup(config.force() || overwrite, config.append(), config.tileWriteThreads());
}
Path layerStatsPath = arguments.file("layer_stats", "layer stats output path",
@ -680,23 +669,6 @@ public class Planetiler {
if (config.tileWriteThreads() < 1) {
throw new IllegalArgumentException("require tile_write_threads >= 1");
}
if (config.tileWriteThreads() > 1) {
if (!output.format().supportsConcurrentWrites()) {
throw new IllegalArgumentException(output.format() + " doesn't support concurrent writes");
}
IntStream.range(1, config.tileWriteThreads())
.mapToObj(output::getPathForMultiThreadedWriter)
.forEach(p -> {
if (!config.append() && (overwrite || config.force())) {
FileUtils.delete(p);
}
if (config.append() && !output.exists(p)) {
throw new IllegalArgumentException("indexed archive \"" + p + "\" must exist when appending");
} else if (!config.append() && output.exists(p)) {
throw new IllegalArgumentException("indexed archive \"" + p + "\" must not exist when not appending");
}
});
}
LOGGER.info("Building {} profile into {} in these phases:", profile.getClass().getSimpleName(), output.uri());
@ -721,7 +693,7 @@ public class Planetiler {
// in case any temp files are left from a previous run...
FileUtils.delete(tmpDir, nodeDbPath, featureDbPath, multipolygonPath);
Files.createDirectories(tmpDir);
FileUtils.createParentDirectories(nodeDbPath, featureDbPath, multipolygonPath, output.getLocalBasePath());
FileUtils.createParentDirectories(nodeDbPath, featureDbPath, multipolygonPath);
if (!toDownload.isEmpty()) {
download();

Wyświetl plik

@ -16,6 +16,7 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.IntStream;
import java.util.stream.Stream;
/**
@ -158,7 +159,7 @@ public record TileArchiveConfig(
/**
* Returns the local <b>base</b> path for this archive, for which directories should be pre-created.
*/
public Path getLocalBasePath() {
Path getLocalBasePath() {
Path p = getLocalPath();
if (format() == Format.FILES) {
p = FilesArchiveUtils.cleanBasePath(p);
@ -166,7 +167,6 @@ public record TileArchiveConfig(
return p;
}
/**
* Deletes the archive if possible.
*/
@ -187,7 +187,7 @@ public record TileArchiveConfig(
* @param p path to the archive
* @return {@code true} if the archive already exists, {@code false} otherwise.
*/
public boolean exists(Path p) {
private boolean exists(Path p) {
if (p == null) {
return false;
}
@ -229,14 +229,49 @@ public record TileArchiveConfig(
};
}
/**
 * Prepares this archive location for writing: validates append/overwrite preconditions,
 * deletes stale outputs when forced, and pre-creates parent directories.
 *
 * @param force            delete any existing archive instead of failing
 * @param append           append to an existing archive (must exist and the format must support it)
 * @param tileWriteThreads number of concurrent tile writers; >1 requires per-thread "indexed" side archives
 * @throws IllegalArgumentException if the requested mode is unsupported or the on-disk state conflicts with it
 */
public void setup(boolean force, boolean append, int tileWriteThreads) {
  if (append) {
    if (!format().supportsAppend()) {
      throw new IllegalArgumentException("cannot append to " + format().id());
    }
    if (!exists()) {
      throw new IllegalArgumentException(uri() + " must exist when appending");
    }
  } else if (force) {
    // forced overwrite: remove whatever is there before writing
    delete();
  } else if (exists()) {
    throw new IllegalArgumentException(uri() + " already exists, use the --force argument to overwrite or --append.");
  }
  if (tileWriteThreads > 1) {
    if (!format().supportsConcurrentWrites()) {
      throw new IllegalArgumentException(format() + " doesn't support concurrent writes");
    }
    // writer 0 uses the main path; writers 1..n-1 each get their own "indexed" archive path
    IntStream.range(1, tileWriteThreads)
      .mapToObj(this::getPathForMultiThreadedWriter)
      .forEach(p -> {
        if (!append && force) {
          // mirror the forced delete above for each per-thread archive
          FileUtils.delete(p);
        }
        // append requires every per-thread archive to already exist; a fresh
        // (non-forced) run requires that none of them exist
        if (append && !exists(p)) {
          throw new IllegalArgumentException("indexed archive \"" + p + "\" must exist when appending");
        } else if (!append && exists(p)) {
          throw new IllegalArgumentException("indexed archive \"" + p + "\" must not exist when not appending");
        }
      });
  }
  FileUtils.createParentDirectories(getLocalBasePath());
}
public enum Format {
MBTILES("mbtiles",
false /* TODO mbtiles could support append in the future by using insert statements with an "on conflict"-clause (i.e. upsert) and by creating tables only if they don't exist, yet */,
false, TileOrder.TMS),
PMTILES("pmtiles", false, false, TileOrder.HILBERT),
false, false, TileOrder.TMS),
PMTILES("pmtiles", false, false, false, TileOrder.HILBERT),
// should be before PBF in order to avoid collisions
FILES("files", true, true, TileOrder.TMS) {
FILES("files", true, true, true, TileOrder.TMS) {
@Override
boolean isUriSupported(URI uri) {
final String path = uri.getPath();
@ -245,25 +280,28 @@ public record TileArchiveConfig(
}
},
CSV("csv", true, true, TileOrder.TMS),
CSV("csv", true, true, false, TileOrder.TMS),
/** identical to {@link Format#CSV} - except for the column separator */
TSV("tsv", true, true, TileOrder.TMS),
TSV("tsv", true, true, false, TileOrder.TMS),
PROTO("proto", true, true, TileOrder.TMS),
PROTO("proto", true, true, false, TileOrder.TMS),
/** identical to {@link Format#PROTO} */
PBF("pbf", true, true, TileOrder.TMS),
PBF("pbf", true, true, false, TileOrder.TMS),
JSON("json", true, true, TileOrder.TMS);
JSON("json", true, true, false, TileOrder.TMS);
private final String id;
private final boolean supportsAppend;
private final boolean supportsConcurrentWrites;
private final boolean supportsConcurrentReads;
private final TileOrder order;
Format(String id, boolean supportsAppend, boolean supportsConcurrentWrites, TileOrder order) {
Format(String id, boolean supportsAppend, boolean supportsConcurrentWrites, boolean supportsConcurrentReads,
TileOrder order) {
this.id = id;
this.supportsAppend = supportsAppend;
this.supportsConcurrentWrites = supportsConcurrentWrites;
this.supportsConcurrentReads = supportsConcurrentReads;
this.order = order;
}
@ -283,6 +321,10 @@ public record TileArchiveConfig(
return supportsConcurrentWrites;
}
/** Returns {@code true} if this archive format can be read by multiple threads concurrently. */
public boolean supportsConcurrentReads() {
  return supportsConcurrentReads;
}
boolean isUriSupported(URI uri) {
final String path = uri.getPath();
return path != null && path.endsWith("." + id);

Wyświetl plik

@ -170,6 +170,11 @@ public record TileArchiveMetadata(
maxzoom, json, others, tileCompression);
}
/**
 * Returns a copy of this metadata with only {@code tileCompression} replaced; all other
 * fields are carried over unchanged. The argument order must match the record components.
 */
public TileArchiveMetadata withTileCompression(TileCompression tileCompression) {
  return new TileArchiveMetadata(name, description, attribution, version, type, format, bounds, center, minzoom,
    maxzoom, json, others, tileCompression);
}
/*
* few workarounds to make collect unknown fields to others work,
* because @JsonAnySetter does not yet work on constructor/creator arguments

Wyświetl plik

@ -261,7 +261,7 @@ public class TileArchiveWriter {
* To optimize emitting many identical consecutive tiles (like large ocean areas), memoize output to avoid
* recomputing if the input hasn't changed.
*/
byte[] lastBytes = null, lastEncoded = null;
byte[] lastBytes = null;
Long lastTileDataHash = null;
boolean lastIsFill = false;
List<TileSizeStats.LayerStats> lastLayerStats = null;
@ -277,24 +277,22 @@ public class TileArchiveWriter {
for (int i = 0; i < batch.in.size(); i++) {
FeatureGroup.TileFeatures tileFeatures = batch.in.get(i);
featuresProcessed.incBy(tileFeatures.getNumFeaturesProcessed());
byte[] bytes, encoded;
byte[] bytes;
List<TileSizeStats.LayerStats> layerStats;
Long tileDataHash;
if (tileFeatures.hasSameContents(last)) {
bytes = lastBytes;
encoded = lastEncoded;
tileDataHash = lastTileDataHash;
layerStats = lastLayerStats;
memoizedTiles.inc();
} else {
VectorTile tile = tileFeatures.getVectorTile(layerAttrStatsUpdater);
if (skipFilled && (lastIsFill = tile.containsOnlyFills())) {
encoded = null;
layerStats = null;
bytes = null;
} else {
var proto = tile.toProto();
encoded = proto.toByteArray();
var encoded = proto.toByteArray();
bytes = switch (config.tileCompression()) {
case GZIP -> gzip(encoded);
case NONE -> encoded;
@ -308,7 +306,6 @@ public class TileArchiveWriter {
}
}
lastLayerStats = layerStats;
lastEncoded = encoded;
lastBytes = bytes;
last = tileFeatures;
if (archive.deduplicates() && tile.likelyToBeDuplicated() && bytes != null) {
@ -327,7 +324,6 @@ public class TileArchiveWriter {
new TileEncodingResult(
tileFeatures.tileCoord(),
bytes,
encoded.length,
tileDataHash == null ? OptionalLong.empty() : OptionalLong.of(tileDataHash),
layerStatsRows
)

Wyświetl plik

@ -1,17 +1,23 @@
package com.onthegomap.planetiler.archive;
import com.onthegomap.planetiler.config.Arguments;
import com.onthegomap.planetiler.config.CommonConfigs;
import com.onthegomap.planetiler.config.PlanetilerConfig;
import com.onthegomap.planetiler.files.ReadableFilesArchive;
import com.onthegomap.planetiler.files.WriteableFilesArchive;
import com.onthegomap.planetiler.mbtiles.Mbtiles;
import com.onthegomap.planetiler.pmtiles.ReadablePmtiles;
import com.onthegomap.planetiler.pmtiles.WriteablePmtiles;
import com.onthegomap.planetiler.stream.ReadableCsvArchive;
import com.onthegomap.planetiler.stream.ReadableJsonStreamArchive;
import com.onthegomap.planetiler.stream.ReadableProtoStreamArchive;
import com.onthegomap.planetiler.stream.StreamArchiveConfig;
import com.onthegomap.planetiler.stream.WriteableCsvArchive;
import com.onthegomap.planetiler.stream.WriteableJsonStreamArchive;
import com.onthegomap.planetiler.stream.WriteableProtoStreamArchive;
import java.io.IOException;
import java.nio.file.Path;
import java.util.function.Supplier;
/** Utilities for creating {@link ReadableTileArchive} and {@link WriteableTileArchive} instances. */
public class TileArchives {
@ -37,45 +43,58 @@ public class TileArchives {
return newReader(TileArchiveConfig.from(archive), config);
}
public static WriteableTileArchive newWriter(TileArchiveConfig archive, PlanetilerConfig config)
throws IOException {
return newWriter(archive, config.arguments());
}
/**
* Returns a new {@link WriteableTileArchive} from the string definition in {@code archive}.
*
* @throws IOException if an error occurs creating the resource.
*/
public static WriteableTileArchive newWriter(TileArchiveConfig archive, PlanetilerConfig config)
public static WriteableTileArchive newWriter(TileArchiveConfig archive, Arguments baseArguments)
throws IOException {
var options = archive.applyFallbacks(config.arguments());
var options = archive.applyFallbacks(baseArguments);
var format = archive.format();
return switch (format) {
case MBTILES ->
// pass-through legacy arguments for fallback
Mbtiles.newWriteToFileDatabase(archive.getLocalPath(), options.orElse(config.arguments()
Mbtiles.newWriteToFileDatabase(archive.getLocalPath(), options.orElse(baseArguments
.subset(Mbtiles.LEGACY_VACUUM_ANALYZE, Mbtiles.LEGACY_COMPACT_DB, Mbtiles.LEGACY_SKIP_INDEX_CREATION)));
case PMTILES -> WriteablePmtiles.newWriteToFile(archive.getLocalPath());
case CSV, TSV -> WriteableCsvArchive.newWriteToFile(format, archive.getLocalPath(),
new StreamArchiveConfig(config, options));
new StreamArchiveConfig(baseArguments, options));
case PROTO, PBF -> WriteableProtoStreamArchive.newWriteToFile(archive.getLocalPath(),
new StreamArchiveConfig(config, options));
new StreamArchiveConfig(baseArguments, options));
case JSON -> WriteableJsonStreamArchive.newWriteToFile(archive.getLocalPath(),
new StreamArchiveConfig(config, options));
case FILES -> WriteableFilesArchive.newWriter(archive.getLocalPath(), options, config.force() || config.append());
new StreamArchiveConfig(baseArguments, options));
case FILES -> WriteableFilesArchive.newWriter(archive.getLocalPath(), options,
CommonConfigs.appendToArchive(baseArguments) || CommonConfigs.force(baseArguments));
};
}
public static ReadableTileArchive newReader(TileArchiveConfig archive, PlanetilerConfig config)
throws IOException {
return newReader(archive, config.arguments());
}
/**
* Returns a new {@link ReadableTileArchive} from the string definition in {@code archive}.
*
* @throws IOException if an error occurs opening the resource.
*/
public static ReadableTileArchive newReader(TileArchiveConfig archive, PlanetilerConfig config)
public static ReadableTileArchive newReader(TileArchiveConfig archive, Arguments baseArguments)
throws IOException {
var options = archive.applyFallbacks(config.arguments());
var options = archive.applyFallbacks(baseArguments);
Supplier<StreamArchiveConfig> streamArchiveConfig = () -> new StreamArchiveConfig(baseArguments, options);
return switch (archive.format()) {
case MBTILES -> Mbtiles.newReadOnlyDatabase(archive.getLocalPath(), options);
case PMTILES -> ReadablePmtiles.newReadFromFile(archive.getLocalPath());
case CSV, TSV -> throw new UnsupportedOperationException("reading CSV is not supported");
case PROTO, PBF -> throw new UnsupportedOperationException("reading PROTO is not supported");
case JSON -> throw new UnsupportedOperationException("reading JSON is not supported");
case CSV, TSV ->
ReadableCsvArchive.newReader(archive.format(), archive.getLocalPath(), streamArchiveConfig.get());
case PROTO, PBF -> ReadableProtoStreamArchive.newReader(archive.getLocalPath(), streamArchiveConfig.get());
case JSON -> ReadableJsonStreamArchive.newReader(archive.getLocalPath(), streamArchiveConfig.get());
case FILES -> ReadableFilesArchive.newReader(archive.getLocalPath(), options);
};
}

Wyświetl plik

@ -54,10 +54,5 @@ public enum TileCompression {
public TileCompression deserialize(JsonParser p, DeserializationContext ctxt) throws IOException {
return findById(p.getValueAsString()).orElse(TileCompression.UNKNOWN);
}
@Override
public TileCompression getNullValue(DeserializationContext ctxt) {
return TileCompression.GZIP;
}
}
}

Wyświetl plik

@ -5,12 +5,10 @@ import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.OptionalLong;
import javax.annotation.Nonnull;
public record TileEncodingResult(
TileCoord coord,
@Nonnull byte[] tileData,
int rawTileSize,
byte[] tileData,
/* will always be empty in non-compact mode and might also be empty in compact mode */
OptionalLong tileDataHash,
List<String> layerStats
@ -20,7 +18,7 @@ public record TileEncodingResult(
byte[] tileData,
OptionalLong tileDataHash
) {
this(coord, tileData, tileData.length, tileDataHash, List.of());
this(coord, tileData, tileDataHash, List.of());
}
@Override

Wyświetl plik

@ -45,11 +45,17 @@ public class Arguments {
private final UnaryOperator<String> provider;
private final Supplier<? extends Collection<String>> keys;
private final String logKeyPrefix;
private boolean silent = false;
private Arguments(UnaryOperator<String> provider, Supplier<? extends Collection<String>> keys) {
private Arguments(UnaryOperator<String> provider, Supplier<? extends Collection<String>> keys, String logKeyPrefix) {
this.provider = provider;
this.keys = keys;
this.logKeyPrefix = logKeyPrefix;
}
private Arguments(UnaryOperator<String> provider, Supplier<? extends Collection<String>> keys) {
this(provider, keys, "");
}
/**
@ -65,7 +71,7 @@ public class Arguments {
}
static Arguments fromJvmProperties(UnaryOperator<String> getter, Supplier<? extends Collection<String>> keys) {
return fromPrefixed(getter, keys, "planetiler", ".", false);
return fromPrefixed(getter, keys, "planetiler", ".", false, "");
}
/**
@ -81,7 +87,7 @@ public class Arguments {
}
static Arguments fromEnvironment(UnaryOperator<String> getter, Supplier<Set<String>> keys) {
return fromPrefixed(getter, keys, "PLANETILER", "_", true);
return fromPrefixed(getter, keys, "PLANETILER", "_", true, "");
}
/**
@ -213,21 +219,22 @@ public class Arguments {
}
private static Arguments from(UnaryOperator<String> provider, Supplier<? extends Collection<String>> rawKeys,
UnaryOperator<String> forward, UnaryOperator<String> reverse) {
UnaryOperator<String> forward, UnaryOperator<String> reverse, String logKeyPrefix) {
Supplier<List<String>> keys = () -> rawKeys.get().stream().flatMap(key -> {
String reversed = reverse.apply(key);
return normalize(key).equals(normalize(reversed)) ? Stream.empty() : Stream.of(reversed);
}).toList();
return new Arguments(key -> provider.apply(forward.apply(key)), keys);
return new Arguments(key -> provider.apply(forward.apply(key)), keys, logKeyPrefix);
}
private static Arguments fromPrefixed(UnaryOperator<String> provider, Supplier<? extends Collection<String>> keys,
String prefix, String separator, boolean uppperCase) {
String prefix, String separator, boolean uppperCase, String logKeyPrefix) {
var prefixRegex = Pattern.compile("^" + Pattern.quote(normalize(prefix + separator, separator, uppperCase)),
Pattern.CASE_INSENSITIVE);
return from(provider, keys,
key -> normalize(prefix + separator + key, separator, uppperCase),
key -> normalize(prefixRegex.matcher(key).replaceFirst(""))
key -> normalize(prefixRegex.matcher(key).replaceFirst("")),
logKeyPrefix
);
}
@ -262,7 +269,8 @@ public class Arguments {
() -> Stream.concat(
other.keys.get().stream(),
keys.get().stream()
).distinct().toList()
).distinct().toList(),
other.logKeyPrefix
);
if (silent) {
result.silence();
@ -307,7 +315,7 @@ public class Arguments {
protected void logArgValue(String key, String description, Object result) {
if (!silent && LOGGER.isDebugEnabled()) {
LOGGER.debug("argument: {}={} ({})", key.replaceFirst("\\|.*$", ""), result, description);
LOGGER.debug("argument: {}{}={} ({})", logKeyPrefix, key.replaceFirst("\\|.*$", ""), result, description);
}
}
@ -532,7 +540,7 @@ public class Arguments {
* Returns a new arguments instance that translates requests for a {@code "key"} to {@code "prefix_key"}.
*/
public Arguments withPrefix(String prefix) {
return fromPrefixed(provider, keys, prefix, "_", false);
return fromPrefixed(provider, keys, prefix, "_", false, logKeyPrefix + prefix + "_");
}
/** Returns a view of this instance, that only supports requests for {@code allowedKeys}. */

Wyświetl plik

@ -0,0 +1,31 @@
package com.onthegomap.planetiler.config;
import com.onthegomap.planetiler.archive.TileArchiveConfig;
import java.time.Duration;
import java.util.stream.Stream;
/**
 * Accessors for command-line arguments that are shared by multiple tools
 * (e.g. {@code Planetiler} and {@code TileCopy}), so that key names, help
 * text, and defaults stay consistent across entry points.
 */
public final class CommonConfigs {

  /** Static utility holder — never instantiated. */
  private CommonConfigs() {}

  /** Reads the {@code force} flag: overwrite existing output and ignore resource warnings. Defaults to false. */
  public static boolean force(Arguments arguments) {
    final String description = "overwriting output file and ignore disk/RAM warnings";
    return arguments.getBoolean("force", description, false);
  }

  /** Reads the {@code append} flag; the help text lists only the formats that support appending. Defaults to false. */
  public static boolean appendToArchive(Arguments arguments) {
    final var appendable = Stream.of(TileArchiveConfig.Format.values())
      .filter(TileArchiveConfig.Format::supportsAppend)
      .map(TileArchiveConfig.Format::id)
      .toList();
    return arguments.getBoolean("append", "append to the output file - only supported by " + appendable, false);
  }

  /** Reads {@code tile_write_threads}; the help text lists only formats supporting concurrent writes. Defaults to 1. */
  public static int tileWriterThreads(Arguments arguments) {
    final var concurrent = Stream.of(TileArchiveConfig.Format.values())
      .filter(TileArchiveConfig.Format::supportsConcurrentWrites)
      .map(TileArchiveConfig.Format::id)
      .toList();
    return arguments.getInteger("tile_write_threads",
      "number of threads used to write tiles - only supported by " + concurrent, 1);
  }

  /** Reads {@code loginterval}: the delay between progress log lines. Defaults to 10 seconds. */
  public static Duration logInterval(Arguments arguments) {
    return arguments.getDuration("loginterval", "time between logs", "10s");
  }
}

Wyświetl plik

@ -1,6 +1,5 @@
package com.onthegomap.planetiler.config;
import com.onthegomap.planetiler.archive.TileArchiveConfig;
import com.onthegomap.planetiler.archive.TileCompression;
import com.onthegomap.planetiler.collection.LongLongMap;
import com.onthegomap.planetiler.collection.Storage;
@ -133,19 +132,13 @@ public record PlanetilerConfig(
featureProcessThreads,
arguments.getInteger("feature_read_threads", "number of threads to use when reading features at tile write time",
threads < 32 ? 1 : 2),
arguments.getInteger("tile_write_threads",
"number of threads used to write tiles - only supported by " + Stream.of(TileArchiveConfig.Format.values())
.filter(TileArchiveConfig.Format::supportsConcurrentWrites).map(TileArchiveConfig.Format::id).toList(),
1),
arguments.getDuration("loginterval", "time between logs", "10s"),
CommonConfigs.tileWriterThreads(arguments),
CommonConfigs.logInterval(arguments),
minzoom,
maxzoom,
renderMaxzoom,
arguments.getBoolean("force", "overwriting output file and ignore disk/RAM warnings", false),
arguments.getBoolean("append",
"append to the output file - only supported by " + Stream.of(TileArchiveConfig.Format.values())
.filter(TileArchiveConfig.Format::supportsAppend).map(TileArchiveConfig.Format::id).toList(),
false),
CommonConfigs.force(arguments),
CommonConfigs.appendToArchive(arguments),
arguments.getBoolean("gzip_temp", "gzip temporary feature storage (uses more CPU, but less disk space)", false),
arguments.getBoolean("mmap_temp", "use memory-mapped IO for temp feature files", true),
arguments.getInteger("sort_max_readers", "maximum number of concurrent read threads to use when sorting chunks",

Wyświetl plik

@ -0,0 +1,131 @@
package com.onthegomap.planetiler.copy;
import static com.onthegomap.planetiler.worker.Worker.joinFutures;
import com.onthegomap.planetiler.archive.TileArchives;
import com.onthegomap.planetiler.config.Arguments;
import com.onthegomap.planetiler.stats.Counter;
import com.onthegomap.planetiler.stats.ProgressLoggers;
import com.onthegomap.planetiler.worker.WorkQueue;
import com.onthegomap.planetiler.worker.WorkerPipeline;
import java.io.IOException;
import java.time.Duration;
/**
* Utility to copy/convert tiles and metadata from one archive into another.
* <p>
* Example usages:
*
* <pre>
* --input=tiles.mbtiles --output=tiles.mbtiles
* --input=tiles.mbtiles --output=tiles.pmtiles --skip_empty=false
* --input=tiles.pmtiles --output=tiles.mbtiles
* --input=tiles.mbtiles --output=tiles/
* --input=tiles.mbtiles --output=tiles.json --out_tile_compression=gzip
* --input=tiles.mbtiles --output=tiles.csv --out_tile_compression=none
* --input=tiles.mbtiles --output=tiles.proto
* </pre>
*/
public class TileCopy {

  // tool configuration parsed from command-line arguments
  private final TileCopyConfig config;
  // per-thread counters aggregated for the progress logger
  private final Counter.MultiThreadCounter tilesReadOverall = Counter.newMultiThreadCounter();
  private final Counter.MultiThreadCounter tilesProcessedOverall = Counter.newMultiThreadCounter();
  private final Counter.MultiThreadCounter tilesWrittenOverall = Counter.newMultiThreadCounter();

  TileCopy(TileCopyConfig config) {
    this.config = config;
  }

  /**
   * Runs the copy: reads every tile from the input archive, re-processes it
   * (e.g. re-compression), and writes it to the output archive, with progress logging.
   *
   * @throws IOException              if opening either archive fails
   * @throws IllegalArgumentException if the input archive is missing or the output setup preconditions fail
   */
  public void run() throws IOException {
    if (!config.inArchive().exists()) {
      throw new IllegalArgumentException("the input archive does not exist");
    }
    // validates append/force mode and pre-creates output directories
    config.outArchive().setup(config.force(), config.append(), config.tileWriterThreads());
    final var loggers = ProgressLoggers.create()
      .addRateCounter("tiles_read", tilesReadOverall::get)
      .addRateCounter("tiles_processed", tilesProcessedOverall::get)
      .addRateCounter("tiles_written", tilesWrittenOverall::get);
    try (
      var reader = TileArchives.newReader(config.inArchive(), config.inArguments());
      var writer = TileArchives.newWriter(config.outArchive(), config.outArguments())
    ) {
      final TileCopyContext context = TileCopyContext.create(reader, writer, config);
      final var pipeline = WorkerPipeline.start("copy", config.stats());
      // resultQueue carries work items to the writer in generation order, decoupled
      // from the read/process branch which may complete items out of order
      final WorkQueue<TileCopyWorkItem> resultQueue =
        new WorkQueue<>("results", config.workQueueCapacity(), config.workQueueMaxBatch(), config.stats());
      try (
        var it = TileCopyWorkItemGenerators.create(context)
      ) {
        writer.initialize();
        final var readerBranch = pipeline
          .<TileCopyWorkItem>fromGenerator("iterator", next -> {
            // closing resultQueue here signals the writer branch that no more items will arrive
            try (resultQueue) {
              while (it.hasNext()) {
                final TileCopyWorkItem t = it.next();
                resultQueue.accept(t); // put to queue immediately => retain order
                next.accept(t);
              }
            }
          })
          .addBuffer("to_read", config.workQueueCapacity(), config.workQueueMaxBatch())
          .<TileCopyWorkItem>addWorker("read", config.tileReaderThreads(), (prev, next) -> {
            final Counter tilesRead = tilesReadOverall.counterForThread();
            for (var item : prev) {
              item.loadOriginalTileData();
              tilesRead.inc();
              next.accept(item);
            }
          })
          .addBuffer("to_process", config.workQueueCapacity(), config.workQueueMaxBatch())
          // NOTE(review): the "process" stage uses tileReaderThreads(), but the config also
          // defines tileProcessorThreads() which is otherwise unused here — confirm intent
          .sinkTo("process", config.tileReaderThreads(), prev -> {
            final Counter tilesProcessed = tilesProcessedOverall.counterForThread();
            for (var item : prev) {
              item.process();
              tilesProcessed.inc();
            }
          });
        final var writerBranch = pipeline.readFromQueue(resultQueue)
          .sinkTo("write", config.tileWriterThreads(), prev -> {
            final Counter tilesWritten = tilesWrittenOverall.counterForThread();
            try (var tileWriter = writer.newTileWriter()) {
              for (var item : prev) {
                var result = item.toTileEncodingResult();
                // optionally drop tiles with no data (null bytes)
                if (result.tileData() == null && config.skipEmpty()) {
                  continue;
                }
                tileWriter.write(result);
                tilesWritten.inc();
              }
            }
          });
        // metadata is finalized only after all tiles have been written
        final var writerDone = writerBranch.done().thenRun(() -> writer.finish(context.outMetadata()));
        final var readerDone = readerBranch.done();
        loggers
          .newLine()
          .addPipelineStats(readerBranch)
          .addPipelineStats(writerBranch);
        loggers.awaitAndLog(joinFutures(writerDone, readerDone), Duration.ofSeconds(1));
      }
    }
  }

  public static void main(String[] args) throws IOException {
    new TileCopy(TileCopyConfig.fromArguments(Arguments.fromEnvOrArgs(args))).run();
  }
}

Wyświetl plik

@ -0,0 +1,108 @@
package com.onthegomap.planetiler.copy;
import com.onthegomap.planetiler.archive.TileArchiveConfig;
import com.onthegomap.planetiler.archive.TileArchiveMetadata;
import com.onthegomap.planetiler.archive.TileArchiveMetadataDeSer;
import com.onthegomap.planetiler.archive.TileCompression;
import com.onthegomap.planetiler.config.Arguments;
import com.onthegomap.planetiler.config.CommonConfigs;
import com.onthegomap.planetiler.geo.TileOrder;
import com.onthegomap.planetiler.stats.ProcessInfo;
import com.onthegomap.planetiler.stats.Stats;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Path;
import java.time.Duration;
import org.locationtech.jts.geom.Envelope;
/**
 * Configuration for {@link TileCopy}, assembled from command-line arguments.
 * Input-specific options use the {@code in_} prefix, output-specific options the {@code out_} prefix.
 */
record TileCopyConfig(
  TileArchiveConfig inArchive,
  TileArchiveConfig outArchive,
  Arguments inArguments,
  Arguments outArguments,
  TileCompression inCompression,
  TileCompression outCompression,
  int tileWriterThreads,
  int tileReaderThreads,
  int tileProcessorThreads,
  Duration logInterval,
  Stats stats,
  int queueSize,
  boolean append,
  boolean force,
  TileArchiveMetadata inMetadata,
  boolean skipEmpty,
  Envelope filterBounds,
  int filterMinzoom,
  int filterMaxzoom,
  boolean scanTilesInOrder,
  TileOrder outputTileOrder,
  int workQueueCapacity,
  int workQueueMaxBatch
) {

  TileCopyConfig {
    if (tileReaderThreads > 1 && !inArchive.format().supportsConcurrentReads()) {
      throw new IllegalArgumentException(inArchive.format().id() + " does not support concurrent reads");
    }
    // fix: this guards concurrent WRITES to the output archive, so it must check
    // supportsConcurrentWrites() (previously it incorrectly checked supportsConcurrentReads())
    if (tileWriterThreads > 1 && !outArchive.format().supportsConcurrentWrites()) {
      throw new IllegalArgumentException(outArchive.format().id() + " does not support concurrent writes");
    }
    if (filterMinzoom > filterMaxzoom) {
      throw new IllegalArgumentException("require minzoom <= maxzoom");
    }
  }

  /**
   * Parses a {@link TileCopyConfig} from {@code baseArguments}; {@code in_}/{@code out_}-prefixed
   * arguments override the unprefixed ones for the respective side.
   *
   * @throws UncheckedIOException if an explicitly-passed input metadata file cannot be read
   */
  static TileCopyConfig fromArguments(Arguments baseArguments) {
    final Arguments inArguments = baseArguments.withPrefix("in");
    final Arguments outArguments = baseArguments.withPrefix("out");
    final Arguments baseOrOutArguments = outArguments.orElse(baseArguments);

    // optional metadata override for inputs whose archive carries none
    final Path inMetadataPath = inArguments.file("metadata", "path to metadata.json to use instead", null);
    final TileArchiveMetadata inMetadata;
    if (inMetadataPath != null) {
      try {
        inMetadata =
          TileArchiveMetadataDeSer.mbtilesMapper().readValue(inMetadataPath.toFile(), TileArchiveMetadata.class);
      } catch (IOException e) {
        throw new UncheckedIOException(e);
      }
    } else {
      inMetadata = null;
    }

    return new TileCopyConfig(
      TileArchiveConfig.from(baseArguments.getString("input", "input tile archive")),
      TileArchiveConfig.from(baseArguments.getString("output", "output tile archive")),
      inArguments,
      outArguments,
      getTileCompressionArg(inArguments, "the input tile compression"),
      getTileCompressionArg(outArguments, "the output tile compression"),
      CommonConfigs.tileWriterThreads(baseOrOutArguments),
      baseArguments.getInteger("tile_read_threads", "number of threads used to read tile data", 1),
      baseArguments.getInteger("tile_process_threads", "number of threads used to process tile data",
        Math.max(1, Runtime.getRuntime().availableProcessors() - 2)),
      CommonConfigs.logInterval(baseArguments),
      baseArguments.getStats(),
      // scale the queue size with available memory, but keep a sane floor
      Math.max(100, (int) (5_000d * ProcessInfo.getMaxMemoryBytes() / 100_000_000_000d)),
      CommonConfigs.appendToArchive(baseOrOutArguments),
      CommonConfigs.force(baseOrOutArguments),
      inMetadata,
      baseArguments.getBoolean("skip_empty", "skip empty (null/zero-bytes) tiles", true),
      baseArguments.bounds("filter_bounds", "the bounds to filter"),
      baseArguments.getInteger("filter_minzoom", "the min zoom", 0),
      baseArguments.getInteger("filter_maxzoom", "the max zoom", 14),
      // fix: help text was truncated mid-sentence ("...they are in the")
      baseArguments.getBoolean("scan_tiles_in_order", "output the tiles in the same order they are in the input archive",
        true),
      baseArguments.getObject("output_tile_order", "the output tile order (if not scanned)", null,
        s -> s == null ? null : TileOrder.valueOf(s.toUpperCase())),
      100_000,
      100
    );
  }

  /** Parses a {@code tile_compression} argument, failing fast on unknown compression ids. */
  private static TileCompression getTileCompressionArg(Arguments args, String description) {
    return args.getObject("tile_compression", description, TileCompression.UNKNOWN,
      v -> TileCompression.findById(v).orElseThrow());
  }
}

Wyświetl plik

@ -0,0 +1,123 @@
package com.onthegomap.planetiler.copy;

import com.onthegomap.planetiler.archive.ReadableTileArchive;
import com.onthegomap.planetiler.archive.TileArchiveMetadata;
import com.onthegomap.planetiler.archive.TileCompression;
import com.onthegomap.planetiler.archive.WriteableTileArchive;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Stream;
import org.locationtech.jts.geom.Envelope;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Everything one tile-copy run needs: the resolved input and output metadata, the copy
 * configuration, and the archives to read from and write to.
 */
record TileCopyContext(
  TileArchiveMetadata inMetadata,
  TileArchiveMetadata outMetadata,
  TileCopyConfig config,
  ReadableTileArchive reader,
  WriteableTileArchive writer
) {

  private static final Logger LOGGER = LoggerFactory.getLogger(TileCopyContext.class);

  /** Compression of the tile data read from the input archive. */
  TileCompression inputCompression() {
    return inMetadata.tileCompression();
  }

  /** Compression the tile data must have when written to the output archive. */
  TileCompression outputCompression() {
    return outMetadata.tileCompression();
  }

  /** Resolves input metadata, derives output metadata from it, and bundles everything up. */
  static TileCopyContext create(ReadableTileArchive reader, WriteableTileArchive writer, TileCopyConfig config) {
    final TileArchiveMetadata in = resolveInMetadata(reader, config);
    final TileArchiveMetadata out = deriveOutMetadata(in, config);
    return new TileCopyContext(in, out, config, reader, writer);
  }

  /**
   * Input metadata: explicitly configured metadata wins, then the archive's own metadata,
   * then a generic fallback; a missing tile compression is filled from the config.
   */
  private static TileArchiveMetadata resolveInMetadata(ReadableTileArchive inArchive, TileCopyConfig config) {
    TileArchiveMetadata metadata = config.inMetadata();
    if (metadata == null) {
      metadata = inArchive.metadata();
      if (metadata == null) {
        LOGGER.atWarn()
          .log("the input archive does not contain any metadata using fallback - consider passing one via in_metadata");
        metadata = fallbackMetadata();
      }
    }
    return metadata.tileCompression() == null ? metadata.withTileCompression(config.inCompression()) : metadata;
  }

  /** Output metadata: input metadata overlaid with the configured compression, bounds, and zoom filters. */
  private static TileArchiveMetadata deriveOutMetadata(TileArchiveMetadata inMetadata, TileCopyConfig config) {
    // explicit output compression wins, then the input's compression, then gzip as the final default
    final TileCompression tileCompression;
    if (config.outCompression() != TileCompression.UNKNOWN) {
      tileCompression = config.outCompression();
    } else if (inMetadata.tileCompression() != TileCompression.UNKNOWN) {
      tileCompression = inMetadata.tileCompression();
    } else {
      tileCompression = TileCompression.GZIP;
    }

    // filter bounds (if given) override the input bounds; may still end up null
    final Envelope bounds = config.filterBounds() != null ? config.filterBounds() : inMetadata.bounds();

    // the output zoom range is the intersection of the input range and the filter range
    final int minzoom = Stream.of(inMetadata.minzoom(), config.filterMinzoom())
      .filter(Objects::nonNull)
      .mapToInt(Integer::intValue)
      .max()
      .orElse(0);
    final int maxzoom = Stream.of(inMetadata.maxzoom(), config.filterMaxzoom())
      .filter(Objects::nonNull)
      .mapToInt(Integer::intValue)
      .min()
      .orElse(0);

    return new TileArchiveMetadata(
      inMetadata.name(),
      inMetadata.description(),
      inMetadata.attribution(),
      inMetadata.version(),
      inMetadata.type(),
      inMetadata.format(),
      bounds,
      inMetadata.center(),
      minzoom,
      maxzoom,
      inMetadata.json(),
      inMetadata.others(),
      tileCompression
    );
  }

  /** Generic metadata used when the input archive provides none. */
  private static TileArchiveMetadata fallbackMetadata() {
    return new TileArchiveMetadata(
      "unknown",
      null,
      null,
      null,
      null,
      TileArchiveMetadata.MVT_FORMAT, // have to guess here that it's pbf
      null,
      null,
      0,
      14,
      new TileArchiveMetadata.TileArchiveMetadataJson(List.of()), // cannot provide any vector layers
      Map.of(),
      null
    );
  }
}

Wyświetl plik

@ -0,0 +1,51 @@
package com.onthegomap.planetiler.copy;

import com.onthegomap.planetiler.archive.TileEncodingResult;
import com.onthegomap.planetiler.geo.TileCoord;
import java.util.OptionalLong;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.function.UnaryOperator;

/**
 * One tile to copy. Pipeline stages hand data to each other through futures: one stage loads the
 * raw bytes ({@link #loadOriginalTileData()}), another re-encodes and hashes them
 * ({@link #process()}), and the writer finally consumes {@link #toTileEncodingResult()}.
 * <p>
 * If any stage fails, its futures are completed exceptionally as well so that downstream stages
 * blocked on {@code get()} fail fast instead of hanging forever.
 */
public class TileCopyWorkItem {

  private final TileCoord coord;
  private final Supplier<byte[]> originalTileDataLoader;
  private final UnaryOperator<byte[]> reEncoder;
  private final Function<byte[], OptionalLong> hasher;

  private final CompletableFuture<byte[]> originalData = new CompletableFuture<>();
  private final CompletableFuture<byte[]> reEncodedData = new CompletableFuture<>();
  private final CompletableFuture<OptionalLong> reEncodedDataHash = new CompletableFuture<>();

  TileCopyWorkItem(
    TileCoord coord,
    Supplier<byte[]> originalTileDataLoader,
    UnaryOperator<byte[]> reEncoder,
    Function<byte[], OptionalLong> hasher
  ) {
    this.coord = coord;
    this.originalTileDataLoader = originalTileDataLoader;
    this.reEncoder = reEncoder;
    this.hasher = hasher;
  }

  /** The coordinate of the tile being copied. */
  public TileCoord getCoord() {
    return coord;
  }

  /** Loads the raw tile bytes from the input archive and publishes them to {@link #process()}. */
  void loadOriginalTileData() {
    try {
      originalData.complete(originalTileDataLoader.get());
    } catch (RuntimeException | Error e) {
      // fail the future too - otherwise a consumer blocked on originalData.get() waits forever
      originalData.completeExceptionally(e);
      throw e;
    }
  }

  /** Re-encodes the loaded bytes and computes the optional dedupe hash. */
  void process() throws ExecutionException, InterruptedException {
    try {
      final var reEncoded = reEncoder.apply(originalData.get());
      final var hash = hasher.apply(reEncoded);
      reEncodedData.complete(reEncoded);
      reEncodedDataHash.complete(hash);
    } catch (Exception e) {
      // propagate failures (incl. a failed load) to consumers blocked in toTileEncodingResult()
      reEncodedData.completeExceptionally(e);
      reEncodedDataHash.completeExceptionally(e);
      throw e; // precise rethrow keeps the declared checked exception types
    }
  }

  /** Blocks until processing finished and returns the result to write to the output archive. */
  TileEncodingResult toTileEncodingResult() throws ExecutionException, InterruptedException {
    return new TileEncodingResult(coord, reEncodedData.get(), reEncodedDataHash.get());
  }
}

Wyświetl plik

@ -0,0 +1,147 @@
package com.onthegomap.planetiler.copy;

import com.onthegomap.planetiler.archive.Tile;
import com.onthegomap.planetiler.archive.TileArchiveConfig;
import com.onthegomap.planetiler.config.Bounds;
import com.onthegomap.planetiler.geo.GeoUtils;
import com.onthegomap.planetiler.geo.TileCoord;
import com.onthegomap.planetiler.geo.TileExtents;
import com.onthegomap.planetiler.geo.TileOrder;
import com.onthegomap.planetiler.util.CloseableIterator;
import com.onthegomap.planetiler.util.Hashing;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.Optional;
import java.util.OptionalLong;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.function.UnaryOperator;
import org.locationtech.jts.geom.Envelope;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Creates the {@link TileCopyWorkItem} iterator for one copy run: either by scanning the input
 * archive in its native order ({@link EagerInOrder}), or by iterating every tile coordinate
 * between min and max zoom in a chosen {@link TileOrder} and looking each tile up individually
 * ({@link TileOrderLoop}).
 */
final class TileCopyWorkItemGenerators {

  private static final Logger LOGGER = LoggerFactory.getLogger(TileCopyWorkItemGenerators.class);

  private TileCopyWorkItemGenerators() {}

  // java:S2095 - the returned iterator owns the underlying resources; the caller must close it
  @SuppressWarnings("java:S2095")
  static CloseableIterator<TileCopyWorkItem> create(TileCopyContext context) {
    final TileArchiveConfig.Format inFormat = context.config().inArchive().format();
    final TileArchiveConfig.Format outFormat = context.config().outArchive().format();
    // decide whether to stream the input in its native order or loop over tile coordinates
    final boolean inOrder;
    if (outFormat == TileArchiveConfig.Format.FILES) {
      inOrder = true; // always use the input formats native order when outputting files
    } else if (inFormat == TileArchiveConfig.Format.FILES) {
      inOrder = false; // never use the inOrder looper when using files since there's no order guarantee
    } else {
      inOrder = context.config().scanTilesInOrder();
    }
    final int minzoom = context.outMetadata().minzoom();
    final int maxzoom = context.outMetadata().maxzoom();
    // filter bounds in world coordinates - defaults to the whole world when no bounds are set
    final Envelope filterBounds =
      context.outMetadata().bounds() == null ? Bounds.WORLD.world() :
        GeoUtils.toWorldBounds(context.outMetadata().bounds());
    final var boundsFilter = TileExtents.computeFromWorldBounds(maxzoom, filterBounds);
    final Predicate<TileCopyWorkItem> zoomFilter = i -> i.getCoord().z() >= minzoom && i.getCoord().z() <= maxzoom;
    final ProcessorArgs processorArgs = new ProcessorArgs(
      TileDataReEncoders.create(context),
      // only compute tile hashes when the output archive can de-duplicate identical tiles
      context.writer().deduplicates() ? b -> OptionalLong.of(Hashing.fnv1a64(b)) :
        b -> OptionalLong.empty()
    );
    if (inOrder) {
      CloseableIterator<TileCopyWorkItem> it = new EagerInOrder(context.reader().getAllTiles(), processorArgs)
        .filter(zoomFilter);
      // only pay for the bounds filter when the output bounds differ from the input bounds
      if (!Objects.equals(context.inMetadata().bounds(), context.outMetadata().bounds())) {
        it = it.filter(i -> boundsFilter.test(i.getCoord()));
      }
      return it;
    } else {
      // stream-style archives have no random access; each getTile() scans the whole stream
      final boolean warnPoorlySupported = switch (inFormat) {
        case CSV, TSV, PROTO, PBF, JSON -> true;
        case PMTILES, MBTILES, FILES -> false;
      };
      if (warnPoorlySupported) {
        LOGGER.atWarn().log("{} random access is very slow", inFormat.id());
      }
      final TileOrder tileOrder = Optional.ofNullable(context.config().outputTileOrder())
        .orElse(context.writer().tileOrder());
      return new TileOrderLoop(tileOrder, minzoom, maxzoom, context.reader()::getTile, processorArgs)
        .filter(i -> boundsFilter.test(i.getCoord()));
    }
  }

  /** Wraps the reader's native tile iterator; tile bytes come eagerly with each source element. */
  private record EagerInOrder(
    CloseableIterator<Tile> it,
    ProcessorArgs processorArgs
  ) implements CloseableIterator<TileCopyWorkItem> {

    @Override
    public void close() {
      it.close();
    }

    @Override
    public boolean hasNext() {
      return it.hasNext();
    }

    @Override
    public TileCopyWorkItem next() {
      if (!it.hasNext()) {
        throw new NoSuchElementException();
      }
      final Tile t = it.next();
      return new TileCopyWorkItem(t.coord(), t::bytes, processorArgs.reEncoder(), processorArgs.hasher());
    }
  }

  /**
   * Iterates all tile indexes from the start of {@code minZoom} to the end of {@code maxZoom}
   * in the given {@link TileOrder}; tile bytes are loaded lazily per work item.
   */
  private static class TileOrderLoop implements CloseableIterator<TileCopyWorkItem> {

    private final TileOrder tileOrder;
    private final int max; // last tile index (inclusive) for maxZoom
    private final Function<TileCoord, byte[]> dataLoader;
    private final ProcessorArgs processorArgs;
    private int current; // next tile index to emit

    TileOrderLoop(TileOrder tileOrder, int minZoom, int maxZoom, Function<TileCoord, byte[]> dataLoader,
      ProcessorArgs processorArgs) {
      this.tileOrder = tileOrder;
      this.current = TileCoord.startIndexForZoom(minZoom);
      this.max = TileCoord.endIndexForZoom(maxZoom);
      this.dataLoader = dataLoader;
      this.processorArgs = processorArgs;
    }

    @Override
    public boolean hasNext() {
      return current <= max;
    }

    @Override
    public TileCopyWorkItem next() {
      if (!hasNext()) {
        throw new NoSuchElementException();
      }
      final TileCoord c = tileOrder.decode(current++);
      return new TileCopyWorkItem(c, () -> dataLoader.apply(c), processorArgs.reEncoder(), processorArgs.hasher());
    }

    @Override
    public void close() {
      // nothing to close
    }
  }

  /** Re-encoder and hasher shared by all work items of one copy run. */
  private record ProcessorArgs(UnaryOperator<byte[]> reEncoder, Function<byte[], OptionalLong> hasher) {}
}

Wyświetl plik

@ -0,0 +1,31 @@
package com.onthegomap.planetiler.copy;

import com.onthegomap.planetiler.archive.TileCompression;
import com.onthegomap.planetiler.util.Gzip;
import java.util.function.UnaryOperator;

/** Produces the byte-level transformation applied to each tile while copying. */
final class TileDataReEncoders {

  private TileDataReEncoders() {}

  /** Builds the re-encoder for one copy run. */
  static UnaryOperator<byte[]> create(TileCopyContext c) {
    // for now just one - but compose multiple as needed in the future (decompress, do something, compress)
    return reCompressor(c.inputCompression(), c.outputCompression());
  }

  /**
   * Converts tile bytes from {@code in} compression to {@code out} compression. When the input
   * compression is unknown, each tile is sniffed individually via {@code Gzip.isZipped}.
   */
  private static UnaryOperator<byte[]> reCompressor(TileCompression in, TileCompression out) {
    if (in == out) {
      return UnaryOperator.identity();
    }
    if (out == TileCompression.GZIP) {
      if (in == TileCompression.NONE) {
        return Gzip::gzip;
      }
      if (in == TileCompression.UNKNOWN) {
        return b -> Gzip.isZipped(b) ? b : Gzip.gzip(b);
      }
    } else if (out == TileCompression.NONE) {
      if (in == TileCompression.GZIP) {
        return Gzip::gunzip;
      }
      if (in == TileCompression.UNKNOWN) {
        return b -> Gzip.isZipped(b) ? Gzip.gunzip(b) : b;
      }
    }
    throw new IllegalArgumentException("unhandled case: in=" + in + " out=" + out);
  }
}

Wyświetl plik

@ -159,12 +159,17 @@ public class WriteableFilesArchive implements WriteableTileArchive {
}
lastCheckedFolder = folder;
try {
Files.write(file, data);
if (data == null) {
Files.createFile(file);
} else {
Files.write(file, data);
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
bytesWritten.incBy(data.length);
if (data != null) {
bytesWritten.incBy(data.length);
}
}
@Override

Wyświetl plik

@ -57,11 +57,29 @@ public class PolygonIndex<T> {
return postFilterContaining(point, items);
}
/** Returns the data associated with all polygons containing {@code point}. */
public List<T> getIntersecting(Geometry geom) {
build();
List<?> items = index.query(geom.getEnvelopeInternal());
return postFilterIntersecting(geom, items);
}
private List<T> postFilterContaining(Point point, List<?> items) {
List<T> result = new ArrayList<>(items.size());
for (Object item : items) {
if (item instanceof GeomWithData<?> value && value.poly.contains(point)) {
@SuppressWarnings("unchecked") T t = (T) value.data;
if (item instanceof GeomWithData<?>(var poly,var data) && poly.contains(point)) {
@SuppressWarnings("unchecked") T t = (T) data;
result.add(t);
}
}
return result;
}
private List<T> postFilterIntersecting(Geometry geom, List<?> items) {
List<T> result = new ArrayList<>(items.size());
for (Object item : items) {
if (item instanceof GeomWithData<?>(var poly,var data) && poly.intersects(geom)) {
@SuppressWarnings("unchecked") T t = (T) data;
result.add(t);
}
}

Wyświetl plik

@ -42,10 +42,14 @@ public record TileCoord(int encoded, int x, int y, int z) implements Comparable<
}
}
private static int startIndexForZoom(int z) {
public static int startIndexForZoom(int z) {
return ZOOM_START_INDEX[z];
}
public static int endIndexForZoom(int z) {
return ZOOM_START_INDEX[z] + (1 << z) * (1 << z) - 1;
}
private static int zoomForIndex(int idx) {
for (int z = MAX_MAXZOOM; z >= 0; z--) {
if (ZOOM_START_INDEX[z] <= idx) {

Wyświetl plik

@ -6,6 +6,7 @@ import com.onthegomap.planetiler.archive.ReadableTileArchive;
import com.onthegomap.planetiler.archive.Tile;
import com.onthegomap.planetiler.archive.TileArchiveMetadata;
import com.onthegomap.planetiler.archive.TileArchiveMetadataDeSer;
import com.onthegomap.planetiler.archive.TileCompression;
import com.onthegomap.planetiler.archive.TileEncodingResult;
import com.onthegomap.planetiler.archive.WriteableTileArchive;
import com.onthegomap.planetiler.config.Arguments;
@ -847,7 +848,11 @@ public final class Mbtiles implements WriteableTileArchive, ReadableTileArchive
*/
public TileArchiveMetadata get() {
Map<String, String> map = new HashMap<>(getAll());
return TileArchiveMetadataDeSer.mbtilesMapper().convertValue(map, TileArchiveMetadata.class);
var metadata = TileArchiveMetadataDeSer.mbtilesMapper().convertValue(map, TileArchiveMetadata.class);
if (metadata.tileCompression() == null) {
metadata = metadata.withTileCompression(TileCompression.GZIP);
}
return metadata;
}
}
}

Wyświetl plik

@ -6,7 +6,6 @@ import com.onthegomap.planetiler.VectorTile;
import com.onthegomap.planetiler.geo.GeometryException;
import com.onthegomap.planetiler.geo.TileCoord;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
@ -92,11 +91,7 @@ public class Verify {
}
private static List<VectorTile.Feature> decode(byte[] zipped) {
try {
return VectorTile.decode(gunzip(zipped));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
return VectorTile.decode(gunzip(zipped));
}
/**

Wyświetl plik

@ -57,8 +57,7 @@ public final class WriteablePmtiles implements WriteableTileArchive {
this.bytesWritten = bytesWritten;
}
private static Directories makeDirectoriesWithLeaves(List<Pmtiles.Entry> subEntries, int leafSize, int attemptNum)
throws IOException {
private static Directories makeDirectoriesWithLeaves(List<Pmtiles.Entry> subEntries, int leafSize, int attemptNum) {
LOGGER.info("Building directories with {} entries per leaf, attempt {}...", leafSize, attemptNum);
ArrayList<Pmtiles.Entry> rootEntries = new ArrayList<>();
ByteArrayList leavesOutputStream = new ByteArrayList();
@ -91,9 +90,8 @@ public final class WriteablePmtiles implements WriteableTileArchive {
*
* @param entries a sorted ObjectArrayList of all entries in the tileset.
* @return byte arrays of the root and all leaf directories, and the # of leaves.
* @throws IOException if compression fails
*/
static Directories makeDirectories(List<Pmtiles.Entry> entries) throws IOException {
static Directories makeDirectories(List<Pmtiles.Entry> entries) {
int maxEntriesRootOnly = 16384;
int attemptNum = 1;
if (entries.size() < maxEntriesRootOnly) {
@ -302,6 +300,9 @@ public final class WriteablePmtiles implements WriteableTileArchive {
long offset;
OptionalLong tileDataHashOpt = encodingResult.tileDataHash();
var data = encodingResult.tileData();
if (data == null) {
return;
}
TileCoord coord = encodingResult.coord();
long tileId = coord.hilbertEncoded();

Wyświetl plik

@ -0,0 +1,50 @@
package com.onthegomap.planetiler.stream;
import com.google.common.base.Suppliers;
import java.util.Base64;
import java.util.HexFormat;
import java.util.List;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Stream;
/**
 * Binary-to-text encodings supported for the tile-data column of CSV/TSV archives.
 * <p>
 * The encoder/decoder functions are stored directly: enum constants are constructed exactly once
 * and {@link Base64}/{@link HexFormat} codecs are cheap and thread-safe, so the previous lazy
 * Guava {@code Suppliers.memoize} indirection added complexity (and a third-party dependency)
 * for no benefit.
 */
enum CsvBinaryEncoding {
  BASE64("base64", Base64.getEncoder()::encodeToString, Base64.getDecoder()::decode),
  HEX("hex", HexFormat.of()::formatHex, HexFormat.of()::parseHex);

  private final String id;
  private final Function<byte[], String> encoder;
  private final Function<String, byte[]> decoder;

  // enum constructors are implicitly private
  CsvBinaryEncoding(String id, Function<byte[], String> encoder, Function<String, byte[]> decoder) {
    this.id = id;
    this.encoder = encoder;
    this.decoder = decoder;
  }

  /** Encodes raw tile bytes into their textual representation. */
  String encode(byte[] b) {
    return encoder.apply(b);
  }

  /** Decodes the textual representation back into raw tile bytes. */
  byte[] decode(String s) {
    return decoder.apply(s);
  }

  /** All supported encoding ids, in declaration order. */
  static List<String> ids() {
    return Stream.of(CsvBinaryEncoding.values()).map(CsvBinaryEncoding::id).toList();
  }

  /**
   * Looks an encoding up by its id.
   *
   * @throws IllegalArgumentException if {@code id} matches no encoding
   */
  static CsvBinaryEncoding fromId(String id) {
    return Stream.of(CsvBinaryEncoding.values())
      .filter(de -> de.id().equals(id))
      .findFirst()
      .orElseThrow(() -> new IllegalArgumentException(
        "unexpected binary encoding - expected one of " + ids() + " but got " + id));
  }

  /** The id used to select this encoding via the {@code binary_encoding} option. */
  String id() {
    return id;
  }
}

Wyświetl plik

@ -0,0 +1,46 @@
package com.onthegomap.planetiler.stream;

import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.onthegomap.planetiler.archive.TileArchiveMetadata;
import java.util.Arrays;
import java.util.Objects;

/**
 * Entries of a line-delimited JSON archive: tiles plus markers for the start ("initialization")
 * and end ("finish", which carries the archive metadata) of the stream. The {@code type}
 * property selects the concrete record on (de)serialization.
 */
@JsonTypeInfo(
  use = JsonTypeInfo.Id.NAME,
  include = JsonTypeInfo.As.PROPERTY,
  property = "type")
@JsonSubTypes({
  @JsonSubTypes.Type(value = JsonStreamArchiveEntry.TileEntry.class, name = "tile"),
  @JsonSubTypes.Type(value = JsonStreamArchiveEntry.InitializationEntry.class, name = "initialization"),
  @JsonSubTypes.Type(value = JsonStreamArchiveEntry.FinishEntry.class, name = "finish")
})
sealed interface JsonStreamArchiveEntry {

  /** One tile: its XYZ coordinate and the encoded (possibly compressed) tile data. */
  record TileEntry(int x, int y, int z, byte[] encodedData) implements JsonStreamArchiveEntry {

    // equals/hashCode/toString are handwritten because the record holds an array component
    @Override
    public int hashCode() {
      final int prime = 31;
      int hash = 1;
      hash = prime * hash + Arrays.hashCode(encodedData);
      hash = prime * hash + Objects.hash(x, y, z);
      return hash;
    }

    @Override
    public boolean equals(Object obj) {
      if (this == obj) {
        return true;
      }
      return obj instanceof JsonStreamArchiveEntry.TileEntry other &&
        x == other.x && y == other.y && z == other.z &&
        Arrays.equals(encodedData, other.encodedData);
    }

    @Override
    public String toString() {
      return "TileEntry [x=" + x + ", y=" + y + ", z=" + z + ", encodedData=" + Arrays.toString(encodedData) + "]";
    }
  }

  /** Marks the beginning of the stream. */
  record InitializationEntry() implements JsonStreamArchiveEntry {}

  /** Marks the end of the stream and carries the archive metadata. */
  record FinishEntry(TileArchiveMetadata metadata) implements JsonStreamArchiveEntry {}
}

Wyświetl plik

@ -0,0 +1,108 @@
package com.onthegomap.planetiler.stream;

import com.onthegomap.planetiler.archive.Tile;
import com.onthegomap.planetiler.archive.TileArchiveConfig;
import com.onthegomap.planetiler.archive.TileArchiveMetadata;
import com.onthegomap.planetiler.geo.TileCoord;
import com.onthegomap.planetiler.util.CloseableIterator;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.Optional;
import java.util.Scanner;
import java.util.function.Function;
import java.util.regex.Pattern;

/**
 * Reads tiles from a CSV file. Counterpart to {@link WriteableCsvArchive}.
 * <p>
 * Supported arguments:
 * <dl>
 * <dt>column_separator</dt>
 * <dd>The column separator e.g. ",", ";", "\t"</dd>
 * <dt>line_separator</dt>
 * <dd>The line separator e.g. "\n", "\r", "\r\n"</dd>
 * <dt>binary_encoding</dt>
 * <dd>How the tile-data column is encoded ({@link CsvBinaryEncoding#ids()})</dd>
 * </dl>
 *
 * @see WriteableCsvArchive
 */
public class ReadableCsvArchive extends ReadableStreamArchive<String> {

  private final Pattern columnSeparatorPattern;
  private final Pattern lineSeparatorPattern;
  private final Function<String, byte[]> tileDataDecoder;

  private ReadableCsvArchive(TileArchiveConfig.Format format, Path basePath, StreamArchiveConfig config) {
    super(basePath, config);
    // separators come from user options and may contain regex meta characters => quote them
    this.columnSeparatorPattern =
      Pattern.compile(Pattern.quote(StreamArchiveUtils.csvOptionColumnSeparator(config.formatOptions(), format)));
    this.lineSeparatorPattern =
      Pattern.compile(Pattern.quote(StreamArchiveUtils.csvOptionLineSeparator(config.formatOptions(), format)));
    final CsvBinaryEncoding binaryEncoding = StreamArchiveUtils.csvOptionBinaryEncoding(config.formatOptions());
    this.tileDataDecoder = binaryEncoding::decode;
  }

  /** Opens a CSV/TSV tile archive for reading. */
  public static ReadableCsvArchive newReader(TileArchiveConfig.Format format, Path basePath,
    StreamArchiveConfig config) {
    return new ReadableCsvArchive(format, basePath, config);
  }

  @Override
  CloseableIterator<String> createIterator() {
    try {
      // read explicitly as UTF-8: Scanner(File) would use the platform default charset,
      // which breaks archives copied between machines with different defaults
      // java:S2095 - the scanner is owned by the returned iterator and closed via close()
      @SuppressWarnings("java:S2095") final Scanner s =
        new Scanner(basePath, StandardCharsets.UTF_8).useDelimiter(lineSeparatorPattern);
      return new CloseableIterator<>() {
        @Override
        public void close() {
          s.close();
        }

        @Override
        public boolean hasNext() {
          return s.hasNext();
        }

        @Override
        public String next() {
          return s.next();
        }
      };
    } catch (IOException e) {
      throw new UncheckedIOException(e);
    }
  }

  /**
   * Parses one CSV line into a tile. Lines have either 4 columns (x, y, z, encoded data) or
   * 3 columns (x, y, z) for an empty/null tile.
   *
   * @throws InvalidCsvFormat if the line has an unexpected number of columns
   */
  @Override
  Optional<Tile> mapEntryToTile(String entry) {
    final String[] splits = columnSeparatorPattern.split(entry);
    final byte[] bytes;
    if (splits.length == 4) {
      bytes = tileDataDecoder.apply(splits[3].strip());
    } else if (splits.length == 3) {
      bytes = null; // empty tile: coordinate only
    } else {
      // truncate long lines so the exception message stays readable
      throw new InvalidCsvFormat(entry.length() > 20 ? entry.substring(0, 20) + "..." : entry);
    }
    return Optional.of(new Tile(
      TileCoord.ofXYZ(
        Integer.parseInt(splits[0].strip()),
        Integer.parseInt(splits[1].strip()),
        Integer.parseInt(splits[2].strip())
      ),
      bytes
    ));
  }

  /** CSV archives carry no metadata. */
  @Override
  Optional<TileArchiveMetadata> mapEntryToMetadata(String entry) {
    return Optional.empty();
  }

  /** Thrown when a CSV line does not have 3 or 4 columns. */
  static class InvalidCsvFormat extends RuntimeException {
    InvalidCsvFormat(String message) {
      super(message);
    }
  }
}

Wyświetl plik

@ -0,0 +1,82 @@
package com.onthegomap.planetiler.stream;

import com.onthegomap.planetiler.archive.Tile;
import com.onthegomap.planetiler.archive.TileArchiveMetadata;
import com.onthegomap.planetiler.geo.TileCoord;
import com.onthegomap.planetiler.util.CloseableIterator;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Optional;

/**
 * Reads tiles and metadata from a delimited JSON file. Counterpart to {@link WriteableJsonStreamArchive}.
 *
 * @see WriteableJsonStreamArchive
 */
public class ReadableJsonStreamArchive extends ReadableStreamArchive<JsonStreamArchiveEntry> {

  private ReadableJsonStreamArchive(Path basePath, StreamArchiveConfig config) {
    super(basePath, config);
  }

  /** Opens a delimited JSON tile archive for reading. */
  public static ReadableJsonStreamArchive newReader(Path basePath, StreamArchiveConfig config) {
    return new ReadableJsonStreamArchive(basePath, config);
  }

  @Override
  CloseableIterator<JsonStreamArchiveEntry> createIterator() {
    BufferedReader fileReader = null;
    try {
      fileReader = Files.newBufferedReader(basePath);
      // effectively-final alias so the anonymous class below can capture it
      final BufferedReader openReader = fileReader;
      final var entries = StreamArchiveUtils.jsonMapperJsonStreamArchive
        .readerFor(JsonStreamArchiveEntry.class)
        .<JsonStreamArchiveEntry>readValues(openReader);
      return new CloseableIterator<>() {
        @Override
        public void close() {
          try {
            openReader.close();
          } catch (IOException e) {
            throw new UncheckedIOException(e);
          }
        }

        @Override
        public boolean hasNext() {
          return entries.hasNext();
        }

        @Override
        public JsonStreamArchiveEntry next() {
          return entries.next();
        }
      };
    } catch (IOException e) {
      // the iterator never got handed the reader - release it here
      closeSilentlyOnError(fileReader);
      throw new UncheckedIOException(e);
    }
  }

  /** Tile entries map to tiles; initialization/finish entries are skipped. */
  @Override
  Optional<Tile> mapEntryToTile(JsonStreamArchiveEntry entry) {
    return entry instanceof JsonStreamArchiveEntry.TileEntry tileEntry ?
      Optional.of(new Tile(
        TileCoord.ofXYZ(tileEntry.x(), tileEntry.y(), tileEntry.z()),
        tileEntry.encodedData()
      )) :
      Optional.empty();
  }

  /** Metadata travels in the finish entry at the end of the stream. */
  @Override
  Optional<TileArchiveMetadata> mapEntryToMetadata(JsonStreamArchiveEntry entry) {
    return entry instanceof JsonStreamArchiveEntry.FinishEntry finishEntry ?
      Optional.ofNullable(finishEntry.metadata()) :
      Optional.empty();
  }
}

Wyświetl plik

@ -0,0 +1,160 @@
package com.onthegomap.planetiler.stream;

import com.onthegomap.planetiler.archive.Tile;
import com.onthegomap.planetiler.archive.TileArchiveMetadata;
import com.onthegomap.planetiler.archive.TileCompression;
import com.onthegomap.planetiler.geo.TileCoord;
import com.onthegomap.planetiler.proto.StreamArchiveProto;
import com.onthegomap.planetiler.util.CloseableIterator;
import com.onthegomap.planetiler.util.LayerAttrStats;
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Path;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.CoordinateXY;
import org.locationtech.jts.geom.Envelope;

/**
 * Reads tiles and metadata from a delimited protobuf file. Counterpart to {@link WriteableProtoStreamArchive}.
 *
 * @see WriteableProtoStreamArchive
 */
public class ReadableProtoStreamArchive extends ReadableStreamArchive<StreamArchiveProto.Entry> {

  private ReadableProtoStreamArchive(Path basePath, StreamArchiveConfig config) {
    super(basePath, config);
  }

  /** Opens a delimited protobuf tile archive for reading. */
  public static ReadableProtoStreamArchive newReader(Path basePath, StreamArchiveConfig config) {
    return new ReadableProtoStreamArchive(basePath, config);
  }

  @Override
  CloseableIterator<StreamArchiveProto.Entry> createIterator() {
    try {
      // buffer the stream: parseDelimitedFrom issues many small reads (length varint + message)
      // which would each hit the file system on a raw FileInputStream
      // java:S2095 - the stream is owned by the returned iterator and closed via close()
      @SuppressWarnings("java:S2095") final var in =
        new BufferedInputStream(new FileInputStream(basePath.toFile()));
      return new CloseableIterator<>() {
        // read-ahead element: parseDelimitedFrom returns null at EOF, so hasNext must pre-fetch
        private StreamArchiveProto.Entry nextValue;

        @Override
        public void close() {
          closeUnchecked(in);
        }

        @Override
        public boolean hasNext() {
          if (nextValue != null) {
            return true;
          }
          try {
            nextValue = StreamArchiveProto.Entry.parseDelimitedFrom(in);
            return nextValue != null;
          } catch (IOException e) {
            throw new UncheckedIOException(e);
          }
        }

        @Override
        public StreamArchiveProto.Entry next() {
          if (!hasNext()) {
            throw new NoSuchElementException();
          }
          final StreamArchiveProto.Entry returnValue = nextValue;
          nextValue = null;
          return returnValue;
        }
      };
    } catch (IOException e) {
      throw new UncheckedIOException(e);
    }
  }

  /** Tile entries map to tiles; all other entry types are skipped. */
  @Override
  Optional<Tile> mapEntryToTile(StreamArchiveProto.Entry entry) {
    if (entry.getEntryCase() != StreamArchiveProto.Entry.EntryCase.TILE) {
      return Optional.empty();
    }
    final StreamArchiveProto.TileEntry tileEntry = entry.getTile();
    return Optional.of(new Tile(
      TileCoord.ofXYZ(tileEntry.getX(), tileEntry.getY(), tileEntry.getZ()),
      tileEntry.getEncodedData().toByteArray()
    ));
  }

  /** Metadata travels in the finish entry at the end of the stream. */
  @Override
  Optional<TileArchiveMetadata> mapEntryToMetadata(StreamArchiveProto.Entry entry) {
    if (entry.getEntryCase() != StreamArchiveProto.Entry.EntryCase.FINISH) {
      return Optional.empty();
    }
    final StreamArchiveProto.Metadata metadata = entry.getFinish().getMetadata();
    // proto3 strings are never null but default to "" => trimToNull restores "absent" semantics
    return Optional.of(new TileArchiveMetadata(
      StringUtils.trimToNull(metadata.getName()),
      StringUtils.trimToNull(metadata.getDescription()),
      StringUtils.trimToNull(metadata.getAttribution()),
      StringUtils.trimToNull(metadata.getVersion()),
      StringUtils.trimToNull(metadata.getType()),
      StringUtils.trimToNull(metadata.getFormat()),
      deserializeEnvelope(metadata.hasBounds() ? metadata.getBounds() : null),
      deserializeCoordinate(metadata.hasCenter() ? metadata.getCenter() : null),
      metadata.hasMinZoom() ? metadata.getMinZoom() : null,
      metadata.hasMaxZoom() ? metadata.getMaxZoom() : null,
      extractMetadataJson(metadata),
      metadata.getOthersMap(),
      deserializeTileCompression(metadata.getTileCompression())
    ));
  }

  private Envelope deserializeEnvelope(StreamArchiveProto.Envelope s) {
    return s == null ? null : new Envelope(s.getMinX(), s.getMaxX(), s.getMinY(), s.getMaxY());
  }

  private Coordinate deserializeCoordinate(StreamArchiveProto.Coordinate s) {
    if (s == null) {
      return null;
    }
    // use the 2D-only subtype when no z-value was serialized
    return s.hasZ() ? new Coordinate(s.getX(), s.getY(), s.getZ()) : new CoordinateXY(s.getX(), s.getY());
  }

  private TileCompression deserializeTileCompression(StreamArchiveProto.TileCompression s) {
    return switch (s) {
      case TILE_COMPRESSION_UNSPECIFIED, UNRECOGNIZED -> TileCompression.UNKNOWN;
      case TILE_COMPRESSION_GZIP -> TileCompression.GZIP;
      case TILE_COMPRESSION_NONE -> TileCompression.NONE;
    };
  }

  /** Returns the vector-layer JSON block, or {@code null} when no layers were serialized. */
  private TileArchiveMetadata.TileArchiveMetadataJson extractMetadataJson(StreamArchiveProto.Metadata s) {
    final List<LayerAttrStats.VectorLayer> vl = deserializeVectorLayers(s.getVectorLayersList());
    return vl.isEmpty() ? null : new TileArchiveMetadata.TileArchiveMetadataJson(vl);
  }

  private List<LayerAttrStats.VectorLayer> deserializeVectorLayers(List<StreamArchiveProto.VectorLayer> s) {
    return s.stream()
      .map(vl -> new LayerAttrStats.VectorLayer(
        vl.getId(),
        vl.getFieldsMap().entrySet().stream()
          .collect(Collectors.toMap(Map.Entry::getKey, e -> deserializeFieldType(e.getValue()))),
        Optional.ofNullable(StringUtils.trimToNull(vl.getDescription())),
        vl.hasMinZoom() ? OptionalInt.of(vl.getMinZoom()) : OptionalInt.empty(),
        vl.hasMaxZoom() ? OptionalInt.of(vl.getMaxZoom()) : OptionalInt.empty()
      ))
      .toList();
  }

  private LayerAttrStats.FieldType deserializeFieldType(StreamArchiveProto.VectorLayer.FieldType s) {
    return switch (s) {
      case FIELD_TYPE_UNSPECIFIED, UNRECOGNIZED -> throw new IllegalArgumentException("unknown type");
      case FIELD_TYPE_NUMBER -> LayerAttrStats.FieldType.NUMBER;
      case FIELD_TYPE_BOOLEAN -> LayerAttrStats.FieldType.BOOLEAN;
      case FIELD_TYPE_STRING -> LayerAttrStats.FieldType.STRING;
    };
  }
}

Wyświetl plik

@ -0,0 +1,102 @@
package com.onthegomap.planetiler.stream;

import com.google.common.base.Suppliers;
import com.onthegomap.planetiler.archive.ReadableTileArchive;
import com.onthegomap.planetiler.archive.Tile;
import com.onthegomap.planetiler.archive.TileArchiveMetadata;
import com.onthegomap.planetiler.geo.TileCoord;
import com.onthegomap.planetiler.util.CloseableIterator;
import java.io.Closeable;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Optional;
import java.util.function.Supplier;

/**
 * Base class for tile archives stored as one sequential stream of entries (CSV/JSON/proto).
 * Random access is emulated by scanning the stream from the beginning, so point lookups are slow.
 */
abstract class ReadableStreamArchive<E> implements ReadableTileArchive {

  // metadata requires a full scan => compute it at most once
  private final Supplier<TileArchiveMetadata> cachedMetadata = Suppliers.memoize(this::loadMetadata);

  final Path basePath;
  final StreamArchiveConfig config;

  ReadableStreamArchive(Path basePath, StreamArchiveConfig config) {
    this.basePath = basePath;
    this.config = config;
  }

  /** Scans the stream for {@code coord} - very slow, intended for occasional lookups only. */
  @Override
  public final byte[] getTile(TileCoord coord) {
    try (var tiles = getAllTiles()) {
      while (tiles.hasNext()) {
        final Tile tile = tiles.next();
        if (tile.coord().equals(coord)) {
          return tile.bytes();
        }
      }
    }
    return null;
  }

  @Override
  public final byte[] getTile(int x, int y, int z) {
    return getTile(TileCoord.ofXYZ(x, y, z));
  }

  /**
   * Callers MUST make sure to close the iterator/derived stream!
   */
  @Override
  public final CloseableIterator<TileCoord> getAllTileCoords() {
    return getAllTiles().map(Tile::coord);
  }

  /**
   * Callers MUST make sure to close the iterator/derived stream!
   */
  @Override
  public final CloseableIterator<Tile> getAllTiles() {
    // keep only the entries that represent tiles
    return createIterator()
      .map(this::mapEntryToTile)
      .filter(Optional::isPresent)
      .map(Optional::get);
  }

  @Override
  public final TileArchiveMetadata metadata() {
    return cachedMetadata.get();
  }

  /** Scans the stream for the first entry carrying metadata; {@code null} if there is none. */
  private TileArchiveMetadata loadMetadata() {
    try (var entries = createIterator()) {
      while (entries.hasNext()) {
        final Optional<TileArchiveMetadata> metadata = mapEntryToMetadata(entries.next());
        if (metadata.isPresent()) {
          return metadata.get();
        }
      }
    }
    return null;
  }

  @Override
  public void close() throws IOException {
    // nothing to close
  }

  /** Opens a fresh iterator over the raw entries of the underlying stream. */
  abstract CloseableIterator<E> createIterator();

  /** Maps a raw entry to a tile, or empty if the entry is not a tile. */
  abstract Optional<Tile> mapEntryToTile(E entry);

  /** Maps a raw entry to metadata, or empty if the entry carries none. */
  abstract Optional<TileArchiveMetadata> mapEntryToMetadata(E entry);

  /** Best-effort cleanup on an error path - the original failure matters more than this close. */
  void closeSilentlyOnError(Closeable c) {
    if (c != null) {
      try {
        c.close();
      } catch (Exception ignored) {
        // intentionally swallowed: we are already propagating another failure
      }
    }
  }

  // java:S112 - deliberately wraps in a generic unchecked exception for iterator close()
  @SuppressWarnings("java:S112")
  void closeUnchecked(Closeable c) {
    if (c != null) {
      try {
        c.close();
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    }
  }
}

Wyświetl plik

@ -1,10 +1,10 @@
package com.onthegomap.planetiler.stream;
import com.onthegomap.planetiler.config.Arguments;
import com.onthegomap.planetiler.config.PlanetilerConfig;
import com.onthegomap.planetiler.config.CommonConfigs;
public record StreamArchiveConfig(boolean appendToFile, Arguments moreOptions) {
public StreamArchiveConfig(PlanetilerConfig planetilerConfig, Arguments moreOptions) {
this(planetilerConfig.append(), moreOptions);
public record StreamArchiveConfig(boolean appendToFile, Arguments formatOptions) {
public StreamArchiveConfig(Arguments baseArguments, Arguments formatOptions) {
this(CommonConfigs.appendToArchive(baseArguments), formatOptions);
}
}

Wyświetl plik

@ -1,5 +1,8 @@
package com.onthegomap.planetiler.stream;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.json.JsonMapper;
import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;
import com.google.common.net.UrlEscapers;
import com.onthegomap.planetiler.archive.TileArchiveConfig;
import com.onthegomap.planetiler.config.Arguments;
@ -12,8 +15,25 @@ import org.apache.commons.text.StringEscapeUtils;
public final class StreamArchiveUtils {
/**
* exposing meta data (non-tile data) might be useful for most use cases but complicates parsing for simple use cases
* => allow to output tiles, only
*/
private static final String JSON_OPTION_WRITE_TILES_ONLY = "tiles_only";
private static final String JSON_OPTION_ROOT_VALUE_SEPARATOR = "root_value_separator";
static final String CSV_OPTION_COLUMN_SEPARATOR = "column_separator";
static final String CSV_OPTION_LINE_SEPARATOR = "line_separator";
static final String CSV_OPTION_BINARY_ENCODING = "binary_encoding";
private static final Pattern quotedPattern = Pattern.compile("^'(.+?)'$");
static final JsonMapper jsonMapperJsonStreamArchive = JsonMapper.builder()
.serializationInclusion(JsonInclude.Include.NON_ABSENT)
.addModule(new Jdk8Module())
.build();
private StreamArchiveUtils() {}
public static Path constructIndexedPath(Path basePath, int index) {
@ -39,6 +59,36 @@ public final class StreamArchiveUtils {
.translateEscapes();
}
/**
 * Separator written between top-level JSON values (default: newline); only a restricted set of
 * separators is accepted - see {@code getEscapedString}.
 */
static String jsonOptionRootValueSeparator(Arguments formatOptions) {
  return getEscapedString(formatOptions, TileArchiveConfig.Format.JSON,
    JSON_OPTION_ROOT_VALUE_SEPARATOR, "root value separator", "'\\n'", List.of("\n", " "));
}
/** When set, only tile entries are written - no initialization/finish (metadata) entries. */
static boolean jsonOptionWriteTilesOnly(Arguments formatOptions) {
  return formatOptions.getBoolean(JSON_OPTION_WRITE_TILES_ONLY, "write tiles, only", false);
}
/**
 * Column separator for CSV/TSV output: "," for CSV and tab for TSV by default, overridable via
 * the column-separator option.
 *
 * @throws IllegalArgumentException if {@code format} is neither CSV nor TSV
 */
static String csvOptionColumnSeparator(Arguments formatOptions, TileArchiveConfig.Format format) {
  final String defaultColumnSeparator;
  if (format == TileArchiveConfig.Format.CSV) {
    defaultColumnSeparator = "','";
  } else if (format == TileArchiveConfig.Format.TSV) {
    defaultColumnSeparator = "'\\t'";
  } else {
    throw new IllegalArgumentException("supported formats are csv and tsv but got " + format.id());
  }
  return getEscapedString(formatOptions, format,
    CSV_OPTION_COLUMN_SEPARATOR, "column separator", defaultColumnSeparator, List.of(",", " "));
}
/** Line (record) separator for CSV/TSV output; defaults to newline. */
static String csvOptionLineSeparator(Arguments formatOptions, TileArchiveConfig.Format format) {
  return StreamArchiveUtils.getEscapedString(formatOptions, format,
    CSV_OPTION_LINE_SEPARATOR, "line separator", "'\\n'", List.of("\n", "\r\n"));
}
/** Encoding used for the binary tile-data column - one of {@code CsvBinaryEncoding.ids()}; defaults to base64. */
static CsvBinaryEncoding csvOptionBinaryEncoding(Arguments formatOptions) {
  return CsvBinaryEncoding.fromId(formatOptions.getString(CSV_OPTION_BINARY_ENCODING,
    "binary (tile) data encoding - one of " + CsvBinaryEncoding.ids(), "base64"));
}
private static String escapeJava(String s) {
if (!s.trim().equals(s)) {
s = "'" + s + "'";

Wyświetl plik

@ -11,11 +11,7 @@ import java.io.UncheckedIOException;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.Base64;
import java.util.HexFormat;
import java.util.List;
import java.util.function.Function;
import java.util.stream.Stream;
/**
* Writes tile data into a CSV file (or pipe).
@ -67,31 +63,16 @@ import java.util.stream.Stream;
*/
public final class WriteableCsvArchive extends WriteableStreamArchive {
static final String OPTION_COLUMN_SEPARATOR = "column_separator";
static final String OPTION_LINE_SEPARTATOR = "line_separator";
static final String OPTION_BINARY_ENCODING = "binary_encoding";
private final String columnSeparator;
private final String lineSeparator;
private final Function<byte[], String> tileDataEncoder;
private WriteableCsvArchive(TileArchiveConfig.Format format, Path p, StreamArchiveConfig config) {
super(p, config);
final String defaultColumnSeparator = switch (format) {
case CSV -> "','";
case TSV -> "'\\t'";
default -> throw new IllegalArgumentException("supported formats are csv and tsv but got " + format.id());
};
this.columnSeparator = StreamArchiveUtils.getEscapedString(config.moreOptions(), format,
OPTION_COLUMN_SEPARATOR, "column separator", defaultColumnSeparator, List.of(",", " "));
this.lineSeparator = StreamArchiveUtils.getEscapedString(config.moreOptions(), format,
OPTION_LINE_SEPARTATOR, "line separator", "'\\n'", List.of("\n", "\r\n"));
final BinaryEncoding binaryEncoding = BinaryEncoding.fromId(config.moreOptions().getString(OPTION_BINARY_ENCODING,
"binary (tile) data encoding - one of " + BinaryEncoding.ids(), "base64"));
this.tileDataEncoder = switch (binaryEncoding) {
case BASE64 -> Base64.getEncoder()::encodeToString;
case HEX -> HexFormat.of()::formatHex;
};
this.columnSeparator = StreamArchiveUtils.csvOptionColumnSeparator(config.formatOptions(), format);
this.lineSeparator = StreamArchiveUtils.csvOptionLineSeparator(config.formatOptions(), format);
final CsvBinaryEncoding binaryEncoding = StreamArchiveUtils.csvOptionBinaryEncoding(config.formatOptions());
this.tileDataEncoder = binaryEncoding::encode;
}
public static WriteableCsvArchive newWriteToFile(TileArchiveConfig.Format format, Path path,
@ -131,7 +112,8 @@ public final class WriteableCsvArchive extends WriteableStreamArchive {
@Override
public void write(TileEncodingResult encodingResult) {
final TileCoord coord = encodingResult.coord();
final String tileDataEncoded = tileDataEncoder.apply(encodingResult.tileData());
final byte[] data = encodingResult.tileData();
final String tileDataEncoded = data == null ? "" : tileDataEncoder.apply(encodingResult.tileData());
try {
// x | y | z | encoded data
writer.write("%d%s%d%s%d%s%s%s".formatted(coord.x(), columnSeparator, coord.y(), columnSeparator, coord.z(),
@ -150,32 +132,4 @@ public final class WriteableCsvArchive extends WriteableStreamArchive {
}
}
}
private enum BinaryEncoding {
BASE64("base64"),
HEX("hex");
private final String id;
private BinaryEncoding(String id) {
this.id = id;
}
static List<String> ids() {
return Stream.of(BinaryEncoding.values()).map(BinaryEncoding::id).toList();
}
static BinaryEncoding fromId(String id) {
return Stream.of(BinaryEncoding.values())
.filter(de -> de.id().equals(id))
.findFirst()
.orElseThrow(() -> new IllegalArgumentException(
"unexpected binary encoding - expected one of " + ids() + " but got " + id));
}
String id() {
return id;
}
}
}

Wyświetl plik

@ -1,14 +1,8 @@
package com.onthegomap.planetiler.stream;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonSubTypes.Type;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.SequenceWriter;
import com.fasterxml.jackson.databind.json.JsonMapper;
import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;
import com.onthegomap.planetiler.archive.TileArchiveConfig;
import com.onthegomap.planetiler.archive.TileArchiveMetadata;
import com.onthegomap.planetiler.archive.TileEncodingResult;
import com.onthegomap.planetiler.geo.TileCoord;
@ -19,41 +13,26 @@ import java.io.OutputStreamWriter;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Writes JSON-serialized tile data as well as meta data into file(s). The entries are of type
* {@link WriteableJsonStreamArchive.Entry} are separated by newline (by default).
* {@link JsonStreamArchiveEntry} are separated by newline (by default).
*/
public final class WriteableJsonStreamArchive extends WriteableStreamArchive {
private static final Logger LOGGER = LoggerFactory.getLogger(WriteableJsonStreamArchive.class);
/**
* exposing meta data (non-tile data) might be useful for most use cases but complicates parsing for simple use cases
* => allow to output tiles, only
*/
private static final String OPTION_WRITE_TILES_ONLY = "tiles_only";
private static final String OPTION_ROOT_VALUE_SEPARATOR = "root_value_separator";
static final JsonMapper jsonMapper = JsonMapper.builder()
.serializationInclusion(Include.NON_ABSENT)
.addModule(new Jdk8Module())
.build();
private static final JsonMapper jsonMapper = StreamArchiveUtils.jsonMapperJsonStreamArchive;
private final boolean writeTilesOnly;
private final String rootValueSeparator;
private WriteableJsonStreamArchive(Path p, StreamArchiveConfig config) {
super(p, config);
this.writeTilesOnly = config.moreOptions().getBoolean(OPTION_WRITE_TILES_ONLY, "write tiles, only", false);
this.rootValueSeparator = StreamArchiveUtils.getEscapedString(config.moreOptions(), TileArchiveConfig.Format.JSON,
OPTION_ROOT_VALUE_SEPARATOR, "root value separator", "'\\n'", List.of("\n", " "));
this.writeTilesOnly = StreamArchiveUtils.jsonOptionWriteTilesOnly(config.formatOptions());
this.rootValueSeparator = StreamArchiveUtils.jsonOptionRootValueSeparator(config.formatOptions());
}
public static WriteableJsonStreamArchive newWriteToFile(Path path, StreamArchiveConfig config) {
@ -70,7 +49,7 @@ public final class WriteableJsonStreamArchive extends WriteableStreamArchive {
if (writeTilesOnly) {
return;
}
writeEntryFlush(new InitializationEntry());
writeEntryFlush(new JsonStreamArchiveEntry.InitializationEntry());
}
@Override
@ -78,13 +57,13 @@ public final class WriteableJsonStreamArchive extends WriteableStreamArchive {
if (writeTilesOnly) {
return;
}
writeEntryFlush(new FinishEntry(metadata));
writeEntryFlush(new JsonStreamArchiveEntry.FinishEntry(metadata));
}
private void writeEntryFlush(Entry entry) {
private void writeEntryFlush(JsonStreamArchiveEntry entry) {
try (var out = new OutputStreamWriter(getPrimaryOutputStream(), StandardCharsets.UTF_8.newEncoder())) {
jsonMapper
.writerFor(Entry.class)
.writerFor(JsonStreamArchiveEntry.class)
.withoutFeatures(JsonGenerator.Feature.AUTO_CLOSE_TARGET)
.writeValue(out, entry);
out.write(rootValueSeparator);
@ -104,7 +83,8 @@ public final class WriteableJsonStreamArchive extends WriteableStreamArchive {
this.rootValueSeparator = rootValueSeparator;
try {
this.jsonWriter =
jsonMapper.writerFor(Entry.class).withRootValueSeparator(rootValueSeparator).writeValues(outputStream);
jsonMapper.writerFor(JsonStreamArchiveEntry.class).withRootValueSeparator(rootValueSeparator)
.writeValues(outputStream);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
@ -114,7 +94,8 @@ public final class WriteableJsonStreamArchive extends WriteableStreamArchive {
public void write(TileEncodingResult encodingResult) {
final TileCoord coord = encodingResult.coord();
try {
jsonWriter.write(new TileEntry(coord.x(), coord.y(), coord.z(), encodingResult.tileData()));
jsonWriter
.write(new JsonStreamArchiveEntry.TileEntry(coord.x(), coord.y(), coord.z(), encodingResult.tileData()));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
@ -150,53 +131,4 @@ public final class WriteableJsonStreamArchive extends WriteableStreamArchive {
}
}
@JsonTypeInfo(
use = JsonTypeInfo.Id.NAME,
include = JsonTypeInfo.As.PROPERTY,
property = "type")
@JsonSubTypes({
@Type(value = TileEntry.class, name = "tile"),
@Type(value = InitializationEntry.class, name = "initialization"),
@Type(value = FinishEntry.class, name = "finish")
})
sealed interface Entry {
}
record TileEntry(int x, int y, int z, byte[] encodedData) implements Entry {
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + Arrays.hashCode(encodedData);
result = prime * result + Objects.hash(x, y, z);
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof TileEntry)) {
return false;
}
TileEntry other = (TileEntry) obj;
return Arrays.equals(encodedData, other.encodedData) && x == other.x && y == other.y && z == other.z;
}
@Override
public String toString() {
return "TileEntry [x=" + x + ", y=" + y + ", z=" + z + ", encodedData=" + Arrays.toString(encodedData) + "]";
}
}
record InitializationEntry() implements Entry {}
record FinishEntry(TileArchiveMetadata metadata) implements Entry {}
}

Wyświetl plik

@ -158,15 +158,17 @@ public final class WriteableProtoStreamArchive extends WriteableStreamArchive {
@Override
public void write(TileEncodingResult encodingResult) {
final TileCoord coord = encodingResult.coord();
final StreamArchiveProto.TileEntry tile = StreamArchiveProto.TileEntry.newBuilder()
final byte[] data = encodingResult.tileData();
StreamArchiveProto.TileEntry.Builder tileBuilder = StreamArchiveProto.TileEntry.newBuilder()
.setZ(coord.z())
.setX(coord.x())
.setY(coord.y())
.setEncodedData(ByteString.copyFrom(encodingResult.tileData()))
.build();
.setY(coord.y());
if (data != null) {
tileBuilder = tileBuilder.setEncodedData(ByteString.copyFrom(encodingResult.tileData()));
}
final StreamArchiveProto.Entry entry = StreamArchiveProto.Entry.newBuilder()
.setTile(tile)
.setTile(tileBuilder.build())
.build();
try {

Wyświetl plik

@ -2,8 +2,10 @@ package com.onthegomap.planetiler.util;
import java.io.Closeable;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.Spliterators;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
@ -56,4 +58,42 @@ public interface CloseableIterator<T> extends Closeable, Iterator<T> {
}
};
}
/**
 * Returns a lazily-filtered view of this iterator containing only elements matching
 * {@code predicate}. Closing the returned iterator closes this (parent) iterator.
 * <p>
 * NOTE(review): the look-ahead buffer uses {@code null} as its "empty" marker, so this does not
 * support sources that yield {@code null} elements - a matching null would be silently dropped.
 * Confirm callers never iterate null elements.
 */
default CloseableIterator<T> filter(Predicate<T> predicate) {
  final var parent = this;
  return new CloseableIterator<>() {
    // Look-ahead buffer: holds the next matching element, or null when none is buffered yet.
    private T nextValue;

    @Override
    public void close() {
      parent.close();
    }

    @Override
    public boolean hasNext() {
      if (nextValue != null) {
        // An element is already buffered from a previous hasNext() call.
        return true;
      }
      // Advance the parent until a matching element is found or the parent is exhausted.
      while (parent.hasNext()) {
        final T parentNext = parent.next();
        if (predicate.test(parentNext)) {
          nextValue = parentNext;
          break;
        }
      }
      return nextValue != null;
    }

    @Override
    public T next() {
      if (!hasNext()) {
        throw new NoSuchElementException();
      }
      final T returnValue = nextValue;
      // Consume the buffered element so the next hasNext() advances the parent again.
      nextValue = null;
      return returnValue;
    }
  };
}
}

Wyświetl plik

@ -3,22 +3,42 @@ package com.onthegomap.planetiler.util;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
public class Gzip {
public final class Gzip {
public static byte[] gzip(byte[] in) throws IOException {
private Gzip() {}
@SuppressWarnings("java:S1168") // null in, null out
public static byte[] gzip(byte[] in) {
if (in == null) {
return null;
}
var bos = new ByteArrayOutputStream(in.length);
try (var gzipOS = new GZIPOutputStream(bos)) {
gzipOS.write(in);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
return bos.toByteArray();
}
public static byte[] gunzip(byte[] zipped) throws IOException {
@SuppressWarnings("java:S1168") // null in, null out
public static byte[] gunzip(byte[] zipped) {
if (zipped == null) {
return null;
}
try (var is = new GZIPInputStream(new ByteArrayInputStream(zipped))) {
return is.readAllBytes();
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
public static boolean isZipped(byte[] in) {
return in != null && in.length > 2 && in[0] == (byte) GZIPInputStream.GZIP_MAGIC &&
in[1] == (byte) (GZIPInputStream.GZIP_MAGIC >> 8);
}
}

Wyświetl plik

@ -61,6 +61,9 @@ public final class Hashing {
*/
public static long fnv1a64(long initHash, byte... data) {
long hash = initHash;
if (data == null) {
return hash;
}
for (byte datum : data) {
hash ^= (datum & 0xff);
hash *= FNV1_PRIME_64;

Wyświetl plik

@ -21,7 +21,7 @@ message InitializationEntry {
}
message FinishEntry {
Metadata metadata = 1;
optional Metadata metadata = 1;
}
message Metadata {
@ -32,10 +32,10 @@ message Metadata {
string version = 4;
string type = 5;
string format = 6;
Envelope bounds = 7;
Coordinate center = 8;
int32 min_zoom = 9;
int32 max_zoom = 10;
optional Envelope bounds = 7;
optional Coordinate center = 8;
optional int32 min_zoom = 9;
optional int32 max_zoom = 10;
repeated VectorLayer vector_layers = 11;
map<string, string> others = 12;
TileCompression tile_compression = 13;
@ -51,15 +51,15 @@ message Envelope {
message Coordinate {
double x = 1;
double y = 2;
double z = 3;
optional double z = 3;
}
message VectorLayer {
string id = 1;
map<string, FieldType> fields = 2;
string description = 3;
int32 min_zoom = 4;
int32 max_zoom = 5;
optional int32 min_zoom = 4;
optional int32 max_zoom = 5;
enum FieldType {
FIELD_TYPE_UNSPECIFIED = 0;

Wyświetl plik

@ -31,7 +31,10 @@ import com.onthegomap.planetiler.reader.osm.OsmElement;
import com.onthegomap.planetiler.reader.osm.OsmReader;
import com.onthegomap.planetiler.reader.osm.OsmRelationInfo;
import com.onthegomap.planetiler.stats.Stats;
import com.onthegomap.planetiler.stream.InMemoryStreamArchive;
import com.onthegomap.planetiler.stream.ReadableCsvArchive;
import com.onthegomap.planetiler.stream.ReadableJsonStreamArchive;
import com.onthegomap.planetiler.stream.ReadableProtoStreamArchive;
import com.onthegomap.planetiler.stream.StreamArchiveConfig;
import com.onthegomap.planetiler.util.BuildInfo;
import com.onthegomap.planetiler.util.Gzip;
import com.onthegomap.planetiler.util.TileSizeStats;
@ -2040,11 +2043,12 @@ class PlanetilerTests {
final ReadableTileArchiveFactory readableTileArchiveFactory = switch (format) {
case MBTILES -> Mbtiles::newReadOnlyDatabase;
case CSV -> p -> InMemoryStreamArchive.fromCsv(p, ",");
case TSV -> p -> InMemoryStreamArchive.fromCsv(p, "\t");
case JSON -> InMemoryStreamArchive::fromJson;
case CSV, TSV ->
p -> ReadableCsvArchive.newReader(format, p, new StreamArchiveConfig(false, Arguments.of()));
case JSON -> p -> ReadableJsonStreamArchive.newReader(p, new StreamArchiveConfig(false, Arguments.of()));
case PMTILES -> ReadablePmtiles::newReadFromFile;
case PROTO, PBF -> InMemoryStreamArchive::fromProtobuf;
case PROTO, PBF ->
p -> ReadableProtoStreamArchive.newReader(p, new StreamArchiveConfig(false, Arguments.of()));
case FILES -> p -> ReadableFilesArchive.newReader(p, Arguments.of());
};

Wyświetl plik

@ -80,7 +80,7 @@ public class TestUtils {
public static final TileArchiveMetadata MAX_METADATA_DESERIALIZED =
new TileArchiveMetadata("name", "description", "attribution", "version", "type", "format", new Envelope(0, 1, 2, 3),
new Coordinate(1.3, 3.7, 1.0), 2, 3,
new Coordinate(1.3, 3.7, 1.0), 0, 8,
TileArchiveMetadata.TileArchiveMetadataJson.create(
List.of(
new LayerAttrStats.VectorLayer("vl0",
@ -102,8 +102,8 @@ public class TestUtils {
"version":"version",
"type":"type",
"format":"format",
"minzoom":"2",
"maxzoom":"3",
"minzoom":"0",
"maxzoom":"8",
"compression":"gzip",
"bounds":"0,2,1,3",
"center":"1.3,3.7,1",
@ -294,7 +294,9 @@ public class TestUtils {
}
public static Set<Tile> getTiles(ReadableTileArchive db) {
return db.getAllTiles().stream().collect(Collectors.toSet());
try (var t = db.getAllTiles(); var s = t.stream()) {
return s.collect(Collectors.toUnmodifiableSet());
}
}
public static int getTilesDataCount(Mbtiles db) throws SQLException {
@ -770,7 +772,7 @@ public class TestUtils {
if (!failures.isEmpty()) {
fail(String.join(System.lineSeparator(), failures));
}
} catch (GeometryException | IOException e) {
} catch (GeometryException e) {
fail(e);
}
}

Wyświetl plik

@ -0,0 +1,248 @@
package com.onthegomap.planetiler.copy;
import static org.junit.jupiter.api.Assertions.assertEquals;
import com.onthegomap.planetiler.TestUtils;
import com.onthegomap.planetiler.config.Arguments;
import com.onthegomap.planetiler.util.Gzip;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Base64;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Stream;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.io.TempDir;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ArgumentsProvider;
import org.junit.jupiter.params.provider.ArgumentsSource;
class TileCopyTest {
private static final String ARCHIVE_0_JSON_BASE = """
{"type":"initialization"}
{"type":"tile","x":0,"y":0,"z":0,"encodedData":"AA=="}
{"type":"tile","x":1,"y":2,"z":3,"encodedData":"AQ=="}
{"type":"finish","metadata":%s}
""";
private static final String ARCHIVE_0_CSV_COMPRESSION_NONE = """
0,0,0,AA==
1,2,3,AQ==
""";
private static final String EXTERNAL_METADATA = "{\"name\": \"blub\"}";
@ParameterizedTest(name = "{index} - {0}")
@ArgumentsSource(TestArgs.class)
void testSimple(String testName, String archiveDataIn, String archiveDataOut, Map<String, String> arguments,
@TempDir Path tempDir) throws Exception {
final Path archiveInPath = tempDir.resolve(archiveDataIn.contains("{") ? "in.json" : "in.csv");
final Path archiveOutPath = tempDir.resolve(archiveDataOut.contains("{") ? "out.json" : "out.csv");
final Path inMetadataPath = tempDir.resolve("metadata.json");
Files.writeString(archiveInPath, archiveDataIn);
Files.writeString(inMetadataPath, EXTERNAL_METADATA);
arguments = new LinkedHashMap<>(arguments);
arguments.replace("in_metadata", inMetadataPath.toString());
final Arguments args = Arguments.of(Map.of(
"input", archiveInPath.toString(),
"output", archiveOutPath.toString()
)).orElse(Arguments.of(arguments));
new TileCopy(TileCopyConfig.fromArguments(args)).run();
if (archiveDataOut.contains("{")) {
final List<String> expectedLines = Arrays.stream(archiveDataOut.split("\n")).toList();
final List<String> actualLines = Files.readAllLines(archiveOutPath);
assertEquals(expectedLines.size(), actualLines.size());
for (int i = 0; i < expectedLines.size(); i++) {
TestUtils.assertSameJson(expectedLines.get(i), actualLines.get(i));
}
} else {
assertEquals(archiveDataOut, Files.readString(archiveOutPath));
}
}
private static String compressBase64(String archiveIn) {
final Base64.Encoder encoder = Base64.getEncoder();
for (int i = 0; i <= 1; i++) {
archiveIn = archiveIn.replace(
encoder.encodeToString(new byte[]{(byte) i}),
encoder.encodeToString(Gzip.gzip(new byte[]{(byte) i}))
);
}
return archiveIn;
}
private static String replaceBase64(String archiveIn, String replacement) {
final Base64.Encoder encoder = Base64.getEncoder();
for (int i = 0; i <= 1; i++) {
archiveIn = archiveIn.replace(
encoder.encodeToString(new byte[]{(byte) i}),
replacement
);
}
return archiveIn;
}
private static class TestArgs implements ArgumentsProvider {
@Override
public Stream<? extends org.junit.jupiter.params.provider.Arguments> provideArguments(ExtensionContext context) {
return Stream.of(
argsOf(
"json(w/o meta, compression:none) to csv(compression:none)",
ARCHIVE_0_JSON_BASE.formatted("null"),
ARCHIVE_0_CSV_COMPRESSION_NONE,
Map.of("out_tile_compression", "none")
),
argsOf(
"json(w/o meta, compression:none) to csv(compression:gzip)",
ARCHIVE_0_JSON_BASE.formatted("null"),
compressBase64(ARCHIVE_0_CSV_COMPRESSION_NONE),
Map.of("out_tile_compression", "gzip")
),
argsOf(
"json(w/o meta, compression:gzip) to csv(compression:none)",
compressBase64(ARCHIVE_0_JSON_BASE.formatted("null")),
ARCHIVE_0_CSV_COMPRESSION_NONE,
Map.of("out_tile_compression", "none")
),
argsOf(
"json(w/o meta, compression:gzip) to csv(compression:gzip)",
compressBase64(ARCHIVE_0_JSON_BASE.formatted("null")),
compressBase64(ARCHIVE_0_CSV_COMPRESSION_NONE),
Map.of("out_tile_compression", "gzip")
),
argsOf(
"json(w/o meta, compression:gzip) to csv(compression:gzip)",
compressBase64(ARCHIVE_0_JSON_BASE.formatted("null")),
compressBase64(ARCHIVE_0_CSV_COMPRESSION_NONE),
Map.of("out_tile_compression", "gzip")
),
argsOf(
"json(w/ meta, compression:gzip) to csv(compression:none)",
compressBase64(ARCHIVE_0_JSON_BASE.formatted(TestUtils.MAX_METADATA_SERIALIZED)),
ARCHIVE_0_CSV_COMPRESSION_NONE,
Map.of("out_tile_compression", "none")
),
argsOf(
"json(w/ meta, compression:gzip) to json(w/ meta, compression:gzip)",
compressBase64(ARCHIVE_0_JSON_BASE.formatted(TestUtils.MAX_METADATA_SERIALIZED)),
compressBase64(ARCHIVE_0_JSON_BASE.formatted(TestUtils.MAX_METADATA_SERIALIZED)),
Map.of()
),
argsOf(
"csv to json - use fallback metadata",
ARCHIVE_0_CSV_COMPRESSION_NONE,
ARCHIVE_0_JSON_BASE.formatted(
"{\"name\":\"unknown\",\"format\":\"pbf\",\"minzoom\":\"0\",\"maxzoom\":\"14\",\"json\":\"{\\\"vector_layers\\\":[]}\",\"compression\":\"none\"}"),
Map.of("out_tile_compression", "none")
),
argsOf(
"csv to json - use external metadata",
ARCHIVE_0_CSV_COMPRESSION_NONE,
ARCHIVE_0_JSON_BASE
.formatted("{\"name\":\"blub\",\"compression\":\"none\",\"minzoom\":\"0\",\"maxzoom\":\"14\"}"),
Map.of("out_tile_compression", "none", "in_metadata", "blub")
),
argsOf(
"csv to json - null handling",
replaceBase64(ARCHIVE_0_CSV_COMPRESSION_NONE, ""),
replaceBase64(ARCHIVE_0_JSON_BASE
.formatted("{\"name\":\"blub\",\"compression\":\"gzip\",\"minzoom\":\"0\",\"maxzoom\":\"14\"}"), "null")
.replace(",\"encodedData\":\"null\"", ""),
Map.of("in_metadata", "blub", "skip_empty", "false")
),
argsOf(
"json to csv - null handling",
replaceBase64(ARCHIVE_0_JSON_BASE.formatted("null"), "null")
.replace(",\"encodedData\":\"null\"", ""),
replaceBase64(ARCHIVE_0_CSV_COMPRESSION_NONE, ""),
Map.of("skip_empty", "false")
),
argsOf(
"csv to csv - empty skipping on",
"""
0,0,0,
1,2,3,AQ==
""",
"""
1,2,3,AQ==
""",
Map.of("skip_empty", "true", "out_tile_compression", "none")
),
argsOf(
"csv to csv - empty skipping off",
"""
0,0,0,
1,2,3,AQ==
""",
"""
0,0,0,
1,2,3,AQ==
""",
Map.of("skip_empty", "false", "out_tile_compression", "none")
),
argsOf(
"tiles in order",
"""
1,2,3,AQ==
0,0,0,AA==
""",
"""
1,2,3,AQ==
0,0,0,AA==
""",
Map.of("out_tile_compression", "none")
),
argsOf(
"tiles re-order",
"""
0,0,1,AQ==
0,0,0,AA==
""",
"""
0,0,0,AA==
0,0,1,AQ==
""",
Map.of("out_tile_compression", "none", "scan_tiles_in_order", "false", "filter_maxzoom", "1")
),
argsOf(
"filter min zoom",
"""
0,0,0,AA==
0,0,1,AQ==
""",
"""
0,0,1,AQ==
""",
Map.of("out_tile_compression", "none", "filter_minzoom", "1")
),
argsOf(
"filter max zoom",
"""
0,0,1,AQ==
0,0,0,AA==
""",
"""
0,0,0,AA==
""",
Map.of("out_tile_compression", "none", "filter_maxzoom", "0")
)
);
}
private static org.junit.jupiter.params.provider.Arguments argsOf(String testName, String archiveDataIn,
String archiveDataOut, Map<String, String> arguments) {
return org.junit.jupiter.params.provider.Arguments.of(testName, archiveDataIn, archiveDataOut, arguments);
}
}
}

Wyświetl plik

@ -49,8 +49,12 @@ class ReadableFilesArchiveTest {
Files.write(files.get(i), new byte[]{(byte) i});
}
try (var reader = ReadableFilesArchive.newReader(tilesDir, Arguments.of())) {
final List<Tile> tiles = reader.getAllTiles().stream().sorted().toList();
try (
var reader = ReadableFilesArchive.newReader(tilesDir, Arguments.of());
var t = reader.getAllTiles();
var s = t.stream()
) {
final List<Tile> tiles = s.sorted().toList();
assertEquals(
List.of(
new Tile(TileCoord.ofXYZ(0, 0, 0), new byte[]{0}),

Wyświetl plik

@ -21,10 +21,13 @@ class PolygonIndexTest {
void testSingle() {
index.put(rectangle(0, 1), 1);
assertListsContainSameElements(List.of(1), index.getContaining(newPoint(0.5, 0.5)));
assertListsContainSameElements(List.of(1), index.getIntersecting(newPoint(0.5, 0.5)));
assertListsContainSameElements(List.of(1), index.getIntersecting(rectangle(1, 2)));
assertListsContainSameElements(List.of(1), index.getContainingOrNearest(newPoint(0.5, 0.5)));
assertListsContainSameElements(List.of(), index.getContaining(newPoint(1.5, 1.5)));
assertListsContainSameElements(List.of(), index.getContainingOrNearest(newPoint(1.5, 1.5)));
assertListsContainSameElements(List.of(), index.getIntersecting(rectangle(2, 3)));
}
@Test
@ -33,6 +36,9 @@ class PolygonIndexTest {
index.put(rectangle(0, 1), 2);
assertListsContainSameElements(List.of(1, 2), index.getContaining(newPoint(0.5, 0.5)));
assertListsContainSameElements(List.of(1, 2), index.getContainingOrNearest(newPoint(0.5, 0.5)));
assertListsContainSameElements(List.of(1, 2), index.getIntersecting(rectangle(0.5, 1.5)));
assertListsContainSameElements(List.of(1, 2), index.getIntersecting(rectangle(1, 2)));
assertListsContainSameElements(List.of(), index.getIntersecting(rectangle(2, 3)));
}
@Test

Wyświetl plik

@ -1,119 +0,0 @@
package com.onthegomap.planetiler.stream;
import com.onthegomap.planetiler.archive.ReadableTileArchive;
import com.onthegomap.planetiler.archive.TileArchiveMetadata;
import com.onthegomap.planetiler.archive.TileEncodingResult;
import com.onthegomap.planetiler.geo.TileCoord;
import com.onthegomap.planetiler.proto.StreamArchiveProto;
import com.onthegomap.planetiler.util.CloseableIterator;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Base64;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
import java.util.OptionalLong;
public class InMemoryStreamArchive implements ReadableTileArchive {
private final List<TileEncodingResult> tileEncodings;
private final TileArchiveMetadata metadata;
private InMemoryStreamArchive(List<TileEncodingResult> tileEncodings, TileArchiveMetadata metadata) {
this.tileEncodings = tileEncodings;
this.metadata = metadata;
}
public static InMemoryStreamArchive fromCsv(Path p, String columnSepatator) throws IOException {
var base64Decoder = Base64.getDecoder();
final List<TileEncodingResult> tileEncodings = new ArrayList<>();
try (var reader = Files.newBufferedReader(p)) {
String line;
while ((line = reader.readLine()) != null) {
final String[] splits = line.split(columnSepatator);
final TileCoord tileCoord = TileCoord.ofXYZ(Integer.parseInt(splits[0]), Integer.parseInt(splits[1]),
Integer.parseInt(splits[2]));
tileEncodings.add(new TileEncodingResult(tileCoord, base64Decoder.decode(splits[3]), OptionalLong.empty()));
}
}
return new InMemoryStreamArchive(tileEncodings, null);
}
public static InMemoryStreamArchive fromProtobuf(Path p) throws IOException {
final List<TileEncodingResult> tileEncodings = new ArrayList<>();
try (var in = Files.newInputStream(p)) {
StreamArchiveProto.Entry entry;
while ((entry = StreamArchiveProto.Entry.parseDelimitedFrom(in)) != null) {
if (entry.getEntryCase() == StreamArchiveProto.Entry.EntryCase.TILE) {
final StreamArchiveProto.TileEntry tileProto = entry.getTile();
final TileCoord tileCoord = TileCoord.ofXYZ(tileProto.getX(), tileProto.getY(), tileProto.getZ());
tileEncodings
.add(new TileEncodingResult(tileCoord, tileProto.getEncodedData().toByteArray(), OptionalLong.empty()));
}
}
}
return new InMemoryStreamArchive(tileEncodings, null /* could add once the format is finalized*/);
}
public static InMemoryStreamArchive fromJson(Path p) throws IOException {
final List<TileEncodingResult> tileEncodings = new ArrayList<>();
final TileArchiveMetadata[] metadata = new TileArchiveMetadata[]{null};
try (var reader = Files.newBufferedReader(p)) {
WriteableJsonStreamArchive.jsonMapper
.readerFor(WriteableJsonStreamArchive.Entry.class)
.readValues(reader)
.forEachRemaining(entry -> {
if (entry instanceof WriteableJsonStreamArchive.TileEntry te) {
final TileCoord tileCoord = TileCoord.ofXYZ(te.x(), te.y(), te.z());
tileEncodings.add(new TileEncodingResult(tileCoord, te.encodedData(), OptionalLong.empty()));
} else if (entry instanceof WriteableJsonStreamArchive.FinishEntry fe) {
metadata[0] = fe.metadata();
}
});
}
return new InMemoryStreamArchive(tileEncodings, Objects.requireNonNull(metadata[0]));
}
@Override
public void close() throws IOException {}
@Override
public byte[] getTile(int x, int y, int z) {
final TileCoord coord = TileCoord.ofXYZ(x, y, z);
return tileEncodings.stream()
.filter(ter -> ter.coord().equals(coord)).findFirst()
.map(TileEncodingResult::tileData)
.orElse(null);
}
/**
 * Returns an iterator over the coordinates of all loaded tiles, in insertion order.
 * The iterator's {@code close()} is a no-op since it is backed by an in-memory list.
 */
@Override
public CloseableIterator<TileCoord> getAllTileCoords() {
  final Iterator<TileEncodingResult> delegate = tileEncodings.iterator();
  return new CloseableIterator<TileCoord>() {
    @Override
    public boolean hasNext() {
      return delegate.hasNext();
    }

    @Override
    public TileCoord next() {
      return delegate.next().coord();
    }

    @Override
    public void close() {
      // nothing to release: backed by an in-memory list
    }
  };
}
/**
 * Returns the metadata this archive was constructed with — may be {@code null} for formats
 * where it is not (yet) extracted, such as the protobuf stream.
 */
@Override
public TileArchiveMetadata metadata() {
  return this.metadata;
}
}

Wyświetl plik

@ -0,0 +1,79 @@
package com.onthegomap.planetiler.stream;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import com.onthegomap.planetiler.archive.Tile;
import com.onthegomap.planetiler.archive.TileArchiveConfig;
import com.onthegomap.planetiler.config.Arguments;
import com.onthegomap.planetiler.geo.TileCoord;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.Map;
import org.apache.commons.text.StringEscapeUtils;
import org.junit.jupiter.api.io.TempDir;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
/**
 * Tests reading a CSV tile archive across different column separators, line separators,
 * padding, and binary encodings (base64/hex).
 */
class ReadableCsvStreamArchiveTest {

  @ParameterizedTest
  @CsvSource(delimiter = '$', textBlock = """
    ,$\\n$false$BASE64
    ,$\\r$false$BASE64
    ,$\\r\\n$false$BASE64
    ,$x$false$BASE64
    ;$\\n$false$BASE64
    {$\\n$false$BASE64
    ,${$false$BASE64
    ,$\\n$false$HEX
    ,$\\n$true$BASE64
    """)
  void testSimple(String columnSeparator, String lineSeparator, boolean pad, CsvBinaryEncoding encoding,
    @TempDir Path tempDir) throws IOException {
    // assemble the CSV from its parts: optional padding after the column separator,
    // the configured line terminator, and the tile bytes in the configured encoding
    final String sep = columnSeparator + (pad ? " " : "");
    final String eol = StringEscapeUtils.unescapeJava(lineSeparator);
    final String data0 = encoding == CsvBinaryEncoding.BASE64 ? "AA==" : "00";
    final String data1 = encoding == CsvBinaryEncoding.BASE64 ? "AQ==" : "01";
    final Path input = tempDir.resolve("in.csv");
    Files.writeString(input,
      String.join(sep, "0", "0", "0", data0) + eol +
        String.join(sep, "1", "2", "3", data1) + eol);
    final var config = new StreamArchiveConfig(
      false,
      Arguments.of(Map.of(
        StreamArchiveUtils.CSV_OPTION_COLUMN_SEPARATOR, columnSeparator,
        StreamArchiveUtils.CSV_OPTION_LINE_SEPARATOR, lineSeparator,
        StreamArchiveUtils.CSV_OPTION_BINARY_ENCODING, encoding.id())));
    final List<Tile> wanted = List.of(
      new Tile(TileCoord.ofXYZ(0, 0, 0), new byte[]{0}),
      new Tile(TileCoord.ofXYZ(1, 2, 3), new byte[]{1}));
    try (var reader = ReadableCsvArchive.newReader(TileArchiveConfig.Format.CSV, input, config)) {
      // reading all tiles must be repeatable — two passes expected to yield the same result
      for (int pass = 0; pass < 2; pass++) {
        try (var tiles = reader.getAllTiles().stream()) {
          assertEquals(wanted, tiles.toList());
        }
      }
      // CSV archives carry no metadata
      assertNull(reader.metadata());
      assertNull(reader.metadata());
      assertArrayEquals(wanted.get(1).bytes(), reader.getTile(TileCoord.ofXYZ(1, 2, 3)));
      assertArrayEquals(wanted.get(0).bytes(), reader.getTile(0, 0, 0));
      assertNull(reader.getTile(4, 5, 6));
    }
  }
}

Wyświetl plik

@ -0,0 +1,52 @@
package com.onthegomap.planetiler.stream;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import com.onthegomap.planetiler.TestUtils;
import com.onthegomap.planetiler.archive.Tile;
import com.onthegomap.planetiler.config.Arguments;
import com.onthegomap.planetiler.geo.TileCoord;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
/** Tests reading a newline-delimited JSON tile archive including its finish-entry metadata. */
class ReadableJsonStreamArchiveTest {

  @Test
  void testSimple(@TempDir Path tempDir) throws IOException {
    final Path input = tempDir.resolve("in.json");
    // one JSON entry per line: initialization, two tiles, finish carrying the metadata
    Files.writeString(input, """
      {"type":"initialization"}
      {"type":"tile","x":0,"y":0,"z":0,"encodedData":"AA=="}
      {"type":"tile","x":1,"y":2,"z":3,"encodedData":"AQ=="}
      {"type":"finish","metadata":%s}
      """.formatted(TestUtils.MAX_METADATA_SERIALIZED));
    final var config = new StreamArchiveConfig(false, Arguments.of());
    final List<Tile> wanted = List.of(
      new Tile(TileCoord.ofXYZ(0, 0, 0), new byte[]{0}),
      new Tile(TileCoord.ofXYZ(1, 2, 3), new byte[]{1}));
    try (var reader = ReadableJsonStreamArchive.newReader(input, config)) {
      // reading all tiles must be repeatable — two passes expected to yield the same result
      for (int pass = 0; pass < 2; pass++) {
        try (var tiles = reader.getAllTiles().stream()) {
          assertEquals(wanted, tiles.toList());
        }
      }
      // metadata is likewise stable across repeated calls
      assertEquals(TestUtils.MAX_METADATA_DESERIALIZED, reader.metadata());
      assertEquals(TestUtils.MAX_METADATA_DESERIALIZED, reader.metadata());
      assertArrayEquals(wanted.get(1).bytes(), reader.getTile(TileCoord.ofXYZ(1, 2, 3)));
      assertArrayEquals(wanted.get(0).bytes(), reader.getTile(0, 0, 0));
      assertNull(reader.getTile(4, 5, 6));
    }
  }
}

Wyświetl plik

@ -0,0 +1,65 @@
package com.onthegomap.planetiler.stream;
import static org.junit.jupiter.api.Assertions.assertEquals;
import com.google.protobuf.ByteString;
import com.onthegomap.planetiler.archive.Tile;
import com.onthegomap.planetiler.archive.TileArchiveMetadata;
import com.onthegomap.planetiler.config.Arguments;
import com.onthegomap.planetiler.geo.TileCoord;
import com.onthegomap.planetiler.proto.StreamArchiveProto;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import org.junit.jupiter.api.io.TempDir;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
/**
 * Tests reading a length-delimited protobuf tile archive, with both the minimal and the
 * fully-populated metadata variants shared with {@link WriteableProtoStreamArchiveTest}.
 */
class ReadableProtoStreamArchiveTest {

  @ParameterizedTest
  @ValueSource(booleans = {true, false})
  void testSimple(boolean maxMetaData, @TempDir Path tempDir) throws IOException {
    final StreamArchiveProto.Metadata metadataSerialized = maxMetaData ?
      WriteableProtoStreamArchiveTest.maxMetadataSerialized : WriteableProtoStreamArchiveTest.minMetadataSerialized;
    final TileArchiveMetadata metadataDeserialized = maxMetaData ?
      WriteableProtoStreamArchiveTest.maxMetadataDeserialized : WriteableProtoStreamArchiveTest.minMetadataDeserialized;
    final Path archiveFile = tempDir.resolve("out.proto");
    // write the stream by hand: init entry, two tiles, finish entry with metadata —
    // each message length-delimited
    try (var out = Files.newOutputStream(archiveFile)) {
      final var init = StreamArchiveProto.Entry.newBuilder()
        .setInitialization(StreamArchiveProto.InitializationEntry.newBuilder());
      init.build().writeDelimitedTo(out);
      final var tileEntry0 = StreamArchiveProto.Entry.newBuilder()
        .setTile(StreamArchiveProto.TileEntry.newBuilder()
          .setX(0).setY(0).setZ(0).setEncodedData(ByteString.copyFrom(new byte[]{0})));
      tileEntry0.build().writeDelimitedTo(out);
      final var tileEntry1 = StreamArchiveProto.Entry.newBuilder()
        .setTile(StreamArchiveProto.TileEntry.newBuilder()
          .setX(1).setY(2).setZ(3).setEncodedData(ByteString.copyFrom(new byte[]{1})));
      tileEntry1.build().writeDelimitedTo(out);
      final var finish = StreamArchiveProto.Entry.newBuilder()
        .setFinish(StreamArchiveProto.FinishEntry.newBuilder().setMetadata(metadataSerialized));
      finish.build().writeDelimitedTo(out);
    }
    final List<Tile> wanted = List.of(
      new Tile(TileCoord.ofXYZ(0, 0, 0), new byte[]{0}),
      new Tile(TileCoord.ofXYZ(1, 2, 3), new byte[]{1}));
    try (var reader = ReadableProtoStreamArchive.newReader(archiveFile, new StreamArchiveConfig(false, Arguments.of()))) {
      // reading all tiles must be repeatable — two passes expected to yield the same result
      for (int pass = 0; pass < 2; pass++) {
        try (var tiles = reader.getAllTiles().stream()) {
          assertEquals(wanted, tiles.toList());
        }
      }
      // metadata is likewise stable across repeated calls
      assertEquals(metadataDeserialized, reader.metadata());
      assertEquals(metadataDeserialized, reader.metadata());
    }
  }
}

Wyświetl plik

@ -113,7 +113,7 @@ class WriteableCsvArchiveTest {
void testColumnSeparator(@TempDir Path tempDir) throws IOException {
final StreamArchiveConfig config =
new StreamArchiveConfig(false, Arguments.of(Map.of(WriteableCsvArchive.OPTION_COLUMN_SEPARATOR, "' '")));
new StreamArchiveConfig(false, Arguments.of(Map.of(StreamArchiveUtils.CSV_OPTION_COLUMN_SEPARATOR, "' '")));
final String expectedCsv =
"""
@ -128,7 +128,7 @@ class WriteableCsvArchiveTest {
void testLineSeparator(@TempDir Path tempDir) throws IOException {
final StreamArchiveConfig config =
new StreamArchiveConfig(false, Arguments.of(Map.of(WriteableCsvArchive.OPTION_LINE_SEPARTATOR, "'\\r'")));
new StreamArchiveConfig(false, Arguments.of(Map.of(StreamArchiveUtils.CSV_OPTION_LINE_SEPARATOR, "'\\r'")));
final String expectedCsv =
"""
@ -143,7 +143,7 @@ class WriteableCsvArchiveTest {
void testHexEncoding(@TempDir Path tempDir) throws IOException {
final StreamArchiveConfig config =
new StreamArchiveConfig(false, Arguments.of(Map.of(WriteableCsvArchive.OPTION_BINARY_ENCODING, "hex")));
new StreamArchiveConfig(false, Arguments.of(Map.of(StreamArchiveUtils.CSV_OPTION_BINARY_ENCODING, "hex")));
final String expectedCsv =
"""

Wyświetl plik

@ -28,8 +28,8 @@ import org.locationtech.jts.geom.Envelope;
class WriteableProtoStreamArchiveTest {
private static final StreamArchiveConfig defaultConfig = new StreamArchiveConfig(false, null);
private static final TileArchiveMetadata maxMetadataIn =
static final StreamArchiveConfig defaultConfig = new StreamArchiveConfig(false, null);
static final TileArchiveMetadata maxMetadataDeserialized =
new TileArchiveMetadata("name", "description", "attribution", "version", "type", "format", new Envelope(0, 1, 2, 3),
new Coordinate(1.3, 3.7, 1.0), 2, 3,
TileArchiveMetadata.TileArchiveMetadataJson.create(
@ -45,7 +45,7 @@ class WriteableProtoStreamArchiveTest {
),
Map.of("a", "b", "c", "d"),
TileCompression.GZIP);
private static final StreamArchiveProto.Metadata maxMetadataOut = StreamArchiveProto.Metadata.newBuilder()
static final StreamArchiveProto.Metadata maxMetadataSerialized = StreamArchiveProto.Metadata.newBuilder()
.setName("name").setDescription("description").setAttribution("attribution").setVersion("version")
.setType("type").setFormat("format")
.setBounds(StreamArchiveProto.Envelope.newBuilder().setMinX(0).setMaxX(1).setMinY(2).setMaxY(3).build())
@ -64,10 +64,10 @@ class WriteableProtoStreamArchiveTest {
.setTileCompression(StreamArchiveProto.TileCompression.TILE_COMPRESSION_GZIP)
.build();
private static final TileArchiveMetadata minMetadataIn =
new TileArchiveMetadata(null, null, null, null, null, null, null, null, null, null, null, null,
static final TileArchiveMetadata minMetadataDeserialized =
new TileArchiveMetadata(null, null, null, null, null, null, null, null, null, null, null, Map.of(),
TileCompression.NONE);
private static final StreamArchiveProto.Metadata minMetadataOut = StreamArchiveProto.Metadata.newBuilder()
static final StreamArchiveProto.Metadata minMetadataSerialized = StreamArchiveProto.Metadata.newBuilder()
.setTileCompression(StreamArchiveProto.TileCompression.TILE_COMPRESSION_NONE)
.build();
@ -83,12 +83,12 @@ class WriteableProtoStreamArchiveTest {
tileWriter.write(tile0);
tileWriter.write(tile1);
}
archive.finish(minMetadataIn);
archive.finish(minMetadataDeserialized);
}
try (InputStream in = Files.newInputStream(csvFile)) {
assertEquals(
List.of(wrapInit(), toEntry(tile0), toEntry(tile1), wrapFinish(minMetadataOut)),
List.of(wrapInit(), toEntry(tile0), toEntry(tile1), wrapFinish(minMetadataSerialized)),
readAllEntries(in)
);
}
@ -119,12 +119,12 @@ class WriteableProtoStreamArchiveTest {
try (var tileWriter = archive.newTileWriter()) {
tileWriter.write(tile4);
}
archive.finish(maxMetadataIn);
archive.finish(maxMetadataDeserialized);
}
try (InputStream in = Files.newInputStream(csvFilePrimary)) {
assertEquals(
List.of(wrapInit(), toEntry(tile0), toEntry(tile1), wrapFinish(maxMetadataOut)),
List.of(wrapInit(), toEntry(tile0), toEntry(tile1), wrapFinish(maxMetadataSerialized)),
readAllEntries(in)
);
}

Wyświetl plik

@ -0,0 +1,36 @@
package com.onthegomap.planetiler.util;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.List;
import java.util.stream.Stream;
import org.junit.jupiter.api.Test;
class CloseableIteratorTest {
@Test
void testFilter() {
assertEquals(
List.of(2, 4),
CloseableIterator.of(Stream.of(1, 2, 3, 4, 5, 6))
.filter(i -> i == 2 || i == 4)
.stream()
.toList()
);
assertEquals(
List.of(),
CloseableIterator.of(Stream.of(100, 99, 98))
.filter(i -> i == 2 || i == 4)
.stream()
.toList()
);
assertEquals(
List.of(),
CloseableIterator.of(Stream.<Integer>of())
.filter(i -> i == 2 || i == 4)
.stream()
.toList()
);
}
}

Wyświetl plik

@ -4,6 +4,7 @@ import static java.util.Map.entry;
import com.onthegomap.planetiler.benchmarks.LongLongMapBench;
import com.onthegomap.planetiler.benchmarks.OpenMapTilesMapping;
import com.onthegomap.planetiler.copy.TileCopy;
import com.onthegomap.planetiler.custommap.ConfiguredMapMain;
import com.onthegomap.planetiler.custommap.validator.SchemaValidator;
import com.onthegomap.planetiler.examples.BikeRouteOverlay;
@ -65,7 +66,9 @@ public class Main {
entry("verify-monaco", VerifyMonaco::main),
entry("stats", TileSizeStats::main),
entry("top-osm-tiles", TopOsmTiles::main),
entry("compare", CompareArchives::main)
entry("compare", CompareArchives::main),
entry("tile-copy", TileCopy::main)
);
private static EntryPoint bundledSchema(String path) {

Wyświetl plik

@ -313,7 +313,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>3.3.0</version>
<version>3.3.1</version>
<executions>
<execution>
<id>attach-sources</id>
@ -394,7 +394,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-gpg-plugin</artifactId>
<version>3.2.2</version>
<version>3.2.3</version>
<executions>
<execution>
<id>sign-artifacts</id>