kopia lustrzana https://github.com/onthegomap/planetiler
Add support for "files"-archive (#761)
* Add support for "files"-archive i.e. write individual pbf-files to disk in the format <base>/z/x/y.pbf in order to use that format it must be passed as "--ouput=/path/to/tiles?format=files" Fixes #536 * default to files format ...if no explict format query param given, path ends with a slash, or no extension given * output metadata.json in files-archive and refactor TileArchiveMetadata 1. put zoom into center (CoordinateXY->Coordinate) - in sync with mbtiles-format 2. add (De-)Serializer for Coordinate+Envelop => avoid duplication and cleaner 3. change the json and proto output for TileArchiveMetadata to be (more) in sync with mbtiles-format * add support for custom tile scheme in files-archive {z}/{x}/{y}.pbf is the default and can be configured as needed - e.g.: - different order: {x}/{y}/{z}.pbf - with intermediate dirs: {x}/a/{y}/b/{z}.pbf - with different extension: {z}/{y}/{y}.pbf.gz instead of {x} and {y}, {xs} and {xy} can be used which breaks up x and y into 2 directories each and ensures that each directory has <1000 children * fix issues with multiple writers 1. call finish archive only once after all writers are finished ...and not every time a writer finishes 2. log "zoom-progress" for the first tile write only (Finished z11 ... now starting z12) 3. remove file/dir-size progress logger bottleneck for files archive => each archive now reports the bytes written, which also fixes the issues of stream-archives reporting the size incorrectly 4. introduce printStats-hook on archive-level * add async file write support to files archive ...allow to use virtual threads ExecturService (bound only!) for tile writing also add some benchmark for writing tiles to disk: fixed, bound virtual, async, unbound virtual * Revert "add async file write support to files archive" This reverts commit b8cfa56977d98520aa8b62252c3a2726d440afe0. 
* few improvements - extract TileSchemeEncoding - use Counter.MultithreadCounter rather than LongAdder to count bytes written - add some JavaDoc * simplify files archive usage 1. allow to pass tile scheme directly via output: --output=tiles/{x}/{y}/{z}.pbf 2. auto-encode { (%7B) and } (%7D) => no need to encode it the URI on CLI * few more adjustments according to PR feeback 1. use WriteableTileArchive#bytesWritten in summmary as well 2. call WriteableTileArchive#init in a safer manner ..and a few more adjustments * more PR feedbackpull/774/head
rodzic
389ccab8e1
commit
c480b35f1c
|
@ -19,7 +19,6 @@ import com.onthegomap.planetiler.reader.osm.OsmReader;
|
|||
import com.onthegomap.planetiler.stats.ProcessInfo;
|
||||
import com.onthegomap.planetiler.stats.Stats;
|
||||
import com.onthegomap.planetiler.stats.Timers;
|
||||
import com.onthegomap.planetiler.stream.StreamArchiveUtils;
|
||||
import com.onthegomap.planetiler.util.AnsiColors;
|
||||
import com.onthegomap.planetiler.util.BuildInfo;
|
||||
import com.onthegomap.planetiler.util.ByteBufferUtil;
|
||||
|
@ -683,15 +682,15 @@ public class Planetiler {
|
|||
throw new IllegalArgumentException(output.format() + " doesn't support concurrent writes");
|
||||
}
|
||||
IntStream.range(1, config.tileWriteThreads())
|
||||
.mapToObj(index -> StreamArchiveUtils.constructIndexedPath(output.getLocalPath(), index))
|
||||
.mapToObj(output::getPathForMultiThreadedWriter)
|
||||
.forEach(p -> {
|
||||
if (!config.append() && (overwrite || config.force())) {
|
||||
FileUtils.delete(p);
|
||||
}
|
||||
if (config.append() && !Files.exists(p)) {
|
||||
throw new IllegalArgumentException("indexed file \"" + p + "\" must exist when appending");
|
||||
} else if (!config.append() && Files.exists(p)) {
|
||||
throw new IllegalArgumentException("indexed file \"" + p + "\" must not exist when not appending");
|
||||
if (config.append() && !output.exists(p)) {
|
||||
throw new IllegalArgumentException("indexed archive \"" + p + "\" must exist when appending");
|
||||
} else if (!config.append() && output.exists(p)) {
|
||||
throw new IllegalArgumentException("indexed archive \"" + p + "\" must not exist when not appending");
|
||||
}
|
||||
});
|
||||
}
|
||||
|
@ -719,7 +718,7 @@ public class Planetiler {
|
|||
// in case any temp files are left from a previous run...
|
||||
FileUtils.delete(tmpDir, nodeDbPath, featureDbPath, multipolygonPath);
|
||||
Files.createDirectories(tmpDir);
|
||||
FileUtils.createParentDirectories(nodeDbPath, featureDbPath, multipolygonPath, output.getLocalPath());
|
||||
FileUtils.createParentDirectories(nodeDbPath, featureDbPath, multipolygonPath, output.getLocalBasePath());
|
||||
|
||||
if (!toDownload.isEmpty()) {
|
||||
download();
|
||||
|
@ -757,7 +756,7 @@ public class Planetiler {
|
|||
stats.monitorFile("nodes", nodeDbPath);
|
||||
stats.monitorFile("features", featureDbPath);
|
||||
stats.monitorFile("multipolygons", multipolygonPath);
|
||||
stats.monitorFile("archive", output.getLocalPath());
|
||||
stats.monitorFile("archive", output.getLocalPath(), archive::bytesWritten);
|
||||
|
||||
for (Stage stage : stages) {
|
||||
stage.task.run();
|
||||
|
@ -774,8 +773,8 @@ public class Planetiler {
|
|||
|
||||
featureGroup.prepare();
|
||||
|
||||
TileArchiveWriter.writeOutput(featureGroup, archive, output::size, tileArchiveMetadata, layerStatsPath, config,
|
||||
stats);
|
||||
TileArchiveWriter.writeOutput(featureGroup, archive, archive::bytesWritten, tileArchiveMetadata, layerStatsPath,
|
||||
config, stats);
|
||||
} catch (IOException e) {
|
||||
throw new IllegalStateException("Unable to write to " + output, e);
|
||||
}
|
||||
|
|
|
@ -3,7 +3,11 @@ package com.onthegomap.planetiler.archive;
|
|||
import static com.onthegomap.planetiler.util.LanguageUtils.nullIfEmpty;
|
||||
|
||||
import com.onthegomap.planetiler.config.Arguments;
|
||||
import com.onthegomap.planetiler.files.FilesArchiveUtils;
|
||||
import com.onthegomap.planetiler.stream.StreamArchiveUtils;
|
||||
import com.onthegomap.planetiler.util.FileUtils;
|
||||
import java.io.IOException;
|
||||
import java.io.UncheckedIOException;
|
||||
import java.net.URI;
|
||||
import java.net.URLDecoder;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
|
@ -11,6 +15,7 @@ import java.nio.file.Files;
|
|||
import java.nio.file.Path;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
/**
|
||||
* Definition for a tileset, parsed from a URI-like string.
|
||||
|
@ -39,6 +44,12 @@ public record TileArchiveConfig(
|
|||
Map<String, String> options
|
||||
) {
|
||||
|
||||
// be more generous and encode some characters for the users
|
||||
private static final Map<String, String> URI_ENCODINGS = Map.of(
|
||||
"{", "%7B",
|
||||
"}", "%7D"
|
||||
);
|
||||
|
||||
private static TileArchiveConfig.Scheme getScheme(URI uri) {
|
||||
String scheme = uri.getScheme();
|
||||
if (scheme == null) {
|
||||
|
@ -77,18 +88,20 @@ public record TileArchiveConfig(
|
|||
|
||||
private static TileArchiveConfig.Format getFormat(URI uri) {
|
||||
String format = parseQuery(uri).get("format");
|
||||
if (format == null) {
|
||||
format = getExtension(uri);
|
||||
}
|
||||
if (format == null) {
|
||||
return TileArchiveConfig.Format.MBTILES;
|
||||
}
|
||||
for (var value : TileArchiveConfig.Format.values()) {
|
||||
if (value.id().equals(format)) {
|
||||
if (value.isQueryFormatSupported(format)) {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
throw new IllegalArgumentException("Unsupported format " + format + " from " + uri);
|
||||
if (format != null) {
|
||||
throw new IllegalArgumentException("Unsupported format " + format + " from " + uri);
|
||||
}
|
||||
for (var value : TileArchiveConfig.Format.values()) {
|
||||
if (value.isUriSupported(uri)) {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
throw new IllegalArgumentException("Unsupported format " + getExtension(uri) + " from " + uri);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -103,6 +116,10 @@ public record TileArchiveConfig(
|
|||
string += "?" + parts[1];
|
||||
}
|
||||
}
|
||||
for (Map.Entry<String, String> uriEncoding : URI_ENCODINGS.entrySet()) {
|
||||
string = string.replace(uriEncoding.getKey(), uriEncoding.getValue());
|
||||
}
|
||||
|
||||
return from(URI.create(string));
|
||||
}
|
||||
|
||||
|
@ -111,7 +128,11 @@ public record TileArchiveConfig(
|
|||
*/
|
||||
public static TileArchiveConfig from(URI uri) {
|
||||
if (uri.getScheme() == null) {
|
||||
String base = Path.of(uri.getPath()).toAbsolutePath().toUri().normalize().toString();
|
||||
final String path = uri.getPath();
|
||||
String base = Path.of(path).toAbsolutePath().toUri().normalize().toString();
|
||||
if (path.endsWith("/")) {
|
||||
base = base + "/";
|
||||
}
|
||||
if (uri.getRawQuery() != null) {
|
||||
base += "?" + uri.getRawQuery();
|
||||
}
|
||||
|
@ -133,13 +154,24 @@ public record TileArchiveConfig(
|
|||
return scheme == Scheme.FILE ? Path.of(URI.create(uri.toString().replaceAll("\\?.*$", ""))) : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the local <b>base</b> path for this archive, for which directories should be pre-created for.
|
||||
*/
|
||||
public Path getLocalBasePath() {
|
||||
Path p = getLocalPath();
|
||||
if (format() == Format.FILES) {
|
||||
p = FilesArchiveUtils.cleanBasePath(p);
|
||||
}
|
||||
return p;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Deletes the archive if possible.
|
||||
*/
|
||||
public void delete() {
|
||||
if (scheme == Scheme.FILE) {
|
||||
FileUtils.delete(getLocalPath());
|
||||
FileUtils.delete(getLocalBasePath());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -147,7 +179,30 @@ public record TileArchiveConfig(
|
|||
* Returns {@code true} if the archive already exists, {@code false} otherwise.
|
||||
*/
|
||||
public boolean exists() {
|
||||
return getLocalPath() != null && Files.exists(getLocalPath());
|
||||
return exists(getLocalBasePath());
|
||||
}
|
||||
|
||||
/**
|
||||
* @param p path to the archive
|
||||
* @return {@code true} if the archive already exists, {@code false} otherwise.
|
||||
*/
|
||||
public boolean exists(Path p) {
|
||||
if (p == null) {
|
||||
return false;
|
||||
}
|
||||
if (format() != Format.FILES) {
|
||||
return Files.exists(p);
|
||||
} else {
|
||||
if (!Files.exists(p)) {
|
||||
return false;
|
||||
}
|
||||
// file-archive exists only if it has any contents
|
||||
try (Stream<Path> paths = Files.list(p)) {
|
||||
return paths.findAny().isPresent();
|
||||
} catch (IOException e) {
|
||||
throw new UncheckedIOException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -165,12 +220,30 @@ public record TileArchiveConfig(
|
|||
return Arguments.of(options).orElse(arguments.withPrefix(format.id));
|
||||
}
|
||||
|
||||
public Path getPathForMultiThreadedWriter(int index) {
|
||||
return switch (format) {
|
||||
case CSV, TSV, JSON, PROTO, PBF -> StreamArchiveUtils.constructIndexedPath(getLocalPath(), index);
|
||||
case FILES -> getLocalPath();
|
||||
default -> throw new UnsupportedOperationException("not supported by " + format);
|
||||
};
|
||||
}
|
||||
|
||||
public enum Format {
|
||||
MBTILES("mbtiles",
|
||||
false /* TODO mbtiles could support append in the future by using insert statements with an "on conflict"-clause (i.e. upsert) and by creating tables only if they don't exist, yet */,
|
||||
false),
|
||||
PMTILES("pmtiles", false, false),
|
||||
|
||||
// should be before PBF in order to avoid collisions
|
||||
FILES("files", true, true) {
|
||||
@Override
|
||||
boolean isUriSupported(URI uri) {
|
||||
final String path = uri.getPath();
|
||||
return path != null && (path.endsWith("/") || path.contains("{") /* template string */ ||
|
||||
!path.contains(".") /* no extension => assume files */);
|
||||
}
|
||||
},
|
||||
|
||||
CSV("csv", true, true),
|
||||
/** identical to {@link Format#CSV} - except for the column separator */
|
||||
TSV("tsv", true, true),
|
||||
|
@ -202,6 +275,15 @@ public record TileArchiveConfig(
|
|||
public boolean supportsConcurrentWrites() {
|
||||
return supportsConcurrentWrites;
|
||||
}
|
||||
|
||||
boolean isUriSupported(URI uri) {
|
||||
final String path = uri.getPath();
|
||||
return path != null && path.endsWith("." + id);
|
||||
}
|
||||
|
||||
boolean isQueryFormatSupported(String queryFormat) {
|
||||
return id.equals(queryFormat);
|
||||
}
|
||||
}
|
||||
|
||||
public enum Scheme {
|
||||
|
|
|
@ -1,36 +1,34 @@
|
|||
package com.onthegomap.planetiler.archive;
|
||||
|
||||
import static com.fasterxml.jackson.annotation.JsonInclude.Include.NON_ABSENT;
|
||||
import static com.onthegomap.planetiler.util.Format.joinCoordinates;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonAnyGetter;
|
||||
import com.fasterxml.jackson.annotation.JsonAnySetter;
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.core.JsonParser;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.core.type.TypeReference;
|
||||
import com.fasterxml.jackson.databind.DeserializationContext;
|
||||
import com.fasterxml.jackson.databind.JsonDeserializer;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
|
||||
import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;
|
||||
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
|
||||
import com.fasterxml.jackson.databind.json.JsonMapper;
|
||||
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer;
|
||||
import com.onthegomap.planetiler.Profile;
|
||||
import com.onthegomap.planetiler.config.PlanetilerConfig;
|
||||
import com.onthegomap.planetiler.geo.GeoUtils;
|
||||
import com.onthegomap.planetiler.util.BuildInfo;
|
||||
import com.onthegomap.planetiler.util.LayerAttrStats;
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import org.locationtech.jts.geom.CoordinateXY;
|
||||
import org.locationtech.jts.geom.Coordinate;
|
||||
import org.locationtech.jts.geom.Envelope;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/** Metadata associated with a tile archive. */
|
||||
/**
|
||||
* Metadata associated with a tile archive.
|
||||
* <p>
|
||||
* The default (de-)serialization corresponds to the
|
||||
* <a href="https://github.com/mapbox/mbtiles-spec/blob/master/1.3/spec.md#metadata">mbtiles spec</a>. As such each
|
||||
* value is a string.
|
||||
*/
|
||||
public record TileArchiveMetadata(
|
||||
@JsonProperty(NAME_KEY) String name,
|
||||
@JsonProperty(DESCRIPTION_KEY) String description,
|
||||
|
@ -38,13 +36,19 @@ public record TileArchiveMetadata(
|
|||
@JsonProperty(VERSION_KEY) String version,
|
||||
@JsonProperty(TYPE_KEY) String type,
|
||||
@JsonProperty(FORMAT_KEY) String format,
|
||||
@JsonIgnore Envelope bounds,
|
||||
@JsonIgnore CoordinateXY center,
|
||||
@JsonProperty(ZOOM_KEY) Double zoom,
|
||||
@JsonProperty(MINZOOM_KEY) Integer minzoom,
|
||||
@JsonProperty(MAXZOOM_KEY) Integer maxzoom,
|
||||
@JsonIgnore List<LayerAttrStats.VectorLayer> vectorLayers,
|
||||
@JsonAnyGetter @JsonDeserialize(using = EmptyMapIfNullDeserializer.class) Map<String, String> others,
|
||||
@JsonSerialize(using = TileArchiveMetadataDeSer.EnvelopeSerializer.class)
|
||||
@JsonDeserialize(using = TileArchiveMetadataDeSer.EnvelopeDeserializer.class) Envelope bounds,
|
||||
@JsonSerialize(using = TileArchiveMetadataDeSer.CoordinateSerializer.class)
|
||||
@JsonDeserialize(using = TileArchiveMetadataDeSer.CoordinateDeserializer.class) Coordinate center,
|
||||
@JsonProperty(MINZOOM_KEY)
|
||||
@JsonSerialize(using = ToStringSerializer.class) Integer minzoom,
|
||||
@JsonProperty(MAXZOOM_KEY)
|
||||
@JsonSerialize(using = ToStringSerializer.class) Integer maxzoom,
|
||||
@JsonProperty(JSON_KEY)
|
||||
@JsonSerialize(using = TileArchiveMetadataDeSer.MetadataJsonSerializer.class)
|
||||
@JsonDeserialize(using = TileArchiveMetadataDeSer.MetadataJsonDeserializer.class) TileArchiveMetadataJson json,
|
||||
@JsonAnyGetter
|
||||
@JsonDeserialize(using = TileArchiveMetadataDeSer.EmptyMapIfNullDeserializer.class) Map<String, String> others,
|
||||
@JsonProperty(COMPRESSION_KEY) TileCompression tileCompression
|
||||
) {
|
||||
|
||||
|
@ -62,12 +66,11 @@ public record TileArchiveMetadata(
|
|||
public static final String VECTOR_LAYERS_KEY = "vector_layers";
|
||||
public static final String COMPRESSION_KEY = "compression";
|
||||
|
||||
public static final String JSON_KEY = "json";
|
||||
|
||||
public static final String MVT_FORMAT = "pbf";
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(TileArchiveMetadata.class);
|
||||
private static final ObjectMapper mapper = new ObjectMapper()
|
||||
.registerModules(new Jdk8Module())
|
||||
.setSerializationInclusion(NON_ABSENT);
|
||||
|
||||
public TileArchiveMetadata(Profile profile, PlanetilerConfig config) {
|
||||
this(profile, config, null);
|
||||
|
@ -82,16 +85,37 @@ public record TileArchiveMetadata(
|
|||
getString(config, TYPE_KEY, profile.isOverlay() ? "overlay" : "baselayer"),
|
||||
getString(config, FORMAT_KEY, MVT_FORMAT),
|
||||
config.bounds().latLon(),
|
||||
new CoordinateXY(config.bounds().latLon().centre()),
|
||||
GeoUtils.getZoomFromLonLatBounds(config.bounds().latLon()),
|
||||
new Coordinate(
|
||||
config.bounds().latLon().centre().getX(),
|
||||
config.bounds().latLon().centre().getY(),
|
||||
GeoUtils.getZoomFromLonLatBounds(config.bounds().latLon())
|
||||
),
|
||||
config.minzoom(),
|
||||
config.maxzoom(),
|
||||
vectorLayers,
|
||||
vectorLayers == null ? null : new TileArchiveMetadataJson(vectorLayers),
|
||||
mapWithBuildInfo(),
|
||||
config.tileCompression()
|
||||
);
|
||||
}
|
||||
|
||||
// just used for the "internal map"-serialization - ignored by default
|
||||
@JsonIgnore
|
||||
@JsonProperty(ZOOM_KEY)
|
||||
public Double zoom() {
|
||||
if (center == null) {
|
||||
return null;
|
||||
}
|
||||
final double z = center.getZ();
|
||||
return Double.isNaN(z) ? null : z;
|
||||
}
|
||||
|
||||
// just used for the "internal map"-serialization - ignored by default
|
||||
@JsonIgnore
|
||||
@JsonProperty(VECTOR_LAYERS_KEY)
|
||||
public List<LayerAttrStats.VectorLayer> vectorLayers() {
|
||||
return json == null ? null : json.vectorLayers;
|
||||
}
|
||||
|
||||
private static String getString(PlanetilerConfig config, String key, String fallback) {
|
||||
return config.arguments()
|
||||
.getString("archive_" + key + "|mbtiles_" + key, "'" + key + "' attribute for tileset metadata", fallback);
|
||||
|
@ -127,27 +151,22 @@ public record TileArchiveMetadata(
|
|||
* keys.
|
||||
*/
|
||||
public Map<String, String> toMap() {
|
||||
Map<String, String> result = new LinkedHashMap<>(mapper.convertValue(this, new TypeReference<>() {}));
|
||||
if (bounds != null) {
|
||||
result.put(BOUNDS_KEY, joinCoordinates(bounds.getMinX(), bounds.getMinY(), bounds.getMaxX(), bounds.getMaxY()));
|
||||
}
|
||||
if (center != null) {
|
||||
result.put(CENTER_KEY, joinCoordinates(center.getX(), center.getY()));
|
||||
}
|
||||
if (vectorLayers != null) {
|
||||
try {
|
||||
result.put(VECTOR_LAYERS_KEY, mapper.writeValueAsString(vectorLayers));
|
||||
} catch (JsonProcessingException e) {
|
||||
LOGGER.warn("Error encoding vector_layers as json", e);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
final JsonMapper mapper = TileArchiveMetadataDeSer.internalMapMapper();
|
||||
return new LinkedHashMap<>(mapper.convertValue(this, new TypeReference<>() {}));
|
||||
}
|
||||
|
||||
/** Returns a copy of this instance with {@link #vectorLayers} set to {@code layerStats}. */
|
||||
/** Returns a copy of this instance with {@link #json} set to {@code layerStats}. */
|
||||
public TileArchiveMetadata withLayerStats(List<LayerAttrStats.VectorLayer> layerStats) {
|
||||
return new TileArchiveMetadata(name, description, attribution, version, type, format, bounds, center, zoom, minzoom,
|
||||
maxzoom, layerStats, others, tileCompression);
|
||||
return withJson(json == null ? TileArchiveMetadataJson.create(layerStats) : json.withLayers(layerStats));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a copy of this instance with {@link #json}'s {@link TileArchiveMetadataJson#vectorLayers()} set to
|
||||
* {@code layerStats}.
|
||||
*/
|
||||
public TileArchiveMetadata withJson(TileArchiveMetadataJson json) {
|
||||
return new TileArchiveMetadata(name, description, attribution, version, type, format, bounds, center, minzoom,
|
||||
maxzoom, json, others, tileCompression);
|
||||
}
|
||||
|
||||
/*
|
||||
|
@ -161,16 +180,16 @@ public record TileArchiveMetadata(
|
|||
others.put(name, value);
|
||||
}
|
||||
|
||||
private static class EmptyMapIfNullDeserializer extends JsonDeserializer<Map<String, String>> {
|
||||
@SuppressWarnings("unchecked")
|
||||
@Override
|
||||
public Map<String, String> deserialize(JsonParser p, DeserializationContext ctxt) throws IOException {
|
||||
return p.readValueAs(HashMap.class);
|
||||
|
||||
public record TileArchiveMetadataJson(
|
||||
@JsonProperty(VECTOR_LAYERS_KEY) List<LayerAttrStats.VectorLayer> vectorLayers
|
||||
) {
|
||||
public TileArchiveMetadataJson withLayers(List<LayerAttrStats.VectorLayer> vectorLayers) {
|
||||
return TileArchiveMetadataJson.create(vectorLayers);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, String> getNullValue(DeserializationContext ctxt) {
|
||||
return new HashMap<>();
|
||||
public static TileArchiveMetadataJson create(List<LayerAttrStats.VectorLayer> vectorLayers) {
|
||||
return vectorLayers == null ? null : new TileArchiveMetadataJson(vectorLayers);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,239 @@
|
|||
package com.onthegomap.planetiler.archive;
|
||||
|
||||
import static com.fasterxml.jackson.annotation.JsonInclude.Include.NON_ABSENT;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import com.fasterxml.jackson.core.JsonGenerator;
|
||||
import com.fasterxml.jackson.core.JsonParser;
|
||||
import com.fasterxml.jackson.core.ObjectCodec;
|
||||
import com.fasterxml.jackson.databind.BeanProperty;
|
||||
import com.fasterxml.jackson.databind.DeserializationContext;
|
||||
import com.fasterxml.jackson.databind.JsonDeserializer;
|
||||
import com.fasterxml.jackson.databind.JsonMappingException;
|
||||
import com.fasterxml.jackson.databind.JsonSerializer;
|
||||
import com.fasterxml.jackson.databind.SerializerProvider;
|
||||
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
|
||||
import com.fasterxml.jackson.databind.deser.ContextualDeserializer;
|
||||
import com.fasterxml.jackson.databind.json.JsonMapper;
|
||||
import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;
|
||||
import com.onthegomap.planetiler.util.Format;
|
||||
import com.onthegomap.planetiler.util.LayerAttrStats;
|
||||
import java.io.IOException;
|
||||
import java.io.StringWriter;
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import org.locationtech.jts.geom.Coordinate;
|
||||
import org.locationtech.jts.geom.CoordinateXY;
|
||||
import org.locationtech.jts.geom.Envelope;
|
||||
|
||||
/**
|
||||
* Container for everything related to (de-)serialization of {@link TileArchiveMetadata}
|
||||
*/
|
||||
public final class TileArchiveMetadataDeSer {
|
||||
|
||||
private TileArchiveMetadataDeSer() {}
|
||||
|
||||
private static final JsonMapper internalMapMapper = newBaseBuilder()
|
||||
.addMixIn(TileArchiveMetadata.class, InternalMapMixin.class)
|
||||
.build();
|
||||
|
||||
private static final JsonMapper mbtilesMapper = newBaseBuilder()
|
||||
.build();
|
||||
|
||||
public static JsonMapper internalMapMapper() {
|
||||
return internalMapMapper;
|
||||
}
|
||||
|
||||
public static JsonMapper mbtilesMapper() {
|
||||
return mbtilesMapper;
|
||||
}
|
||||
|
||||
public static JsonMapper.Builder newBaseBuilder() {
|
||||
return JsonMapper.builder()
|
||||
.addModule(new Jdk8Module())
|
||||
.serializationInclusion(NON_ABSENT);
|
||||
}
|
||||
|
||||
public record InternalMapMixin(
|
||||
@JsonIgnore(true) TileArchiveMetadata.TileArchiveMetadataJson json,
|
||||
@JsonIgnore(false)
|
||||
@JsonSerialize(using = VectorLayersToStringSerializer.class) List<LayerAttrStats.VectorLayer> vectorLayers,
|
||||
@JsonIgnore(false) Double zoom,
|
||||
@JsonSerialize(using = CoordinateXYSerializer.class) Coordinate center
|
||||
) {}
|
||||
|
||||
public record StrictDeserializationMixin(
|
||||
@StrictDeserialization Coordinate center,
|
||||
@StrictDeserialization Envelope bounds
|
||||
) {}
|
||||
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Target({ElementType.ANNOTATION_TYPE, ElementType.METHOD, ElementType.CONSTRUCTOR, ElementType.FIELD})
|
||||
public @interface StrictDeserialization {
|
||||
}
|
||||
|
||||
private static boolean isStrictDeserialization(BeanProperty property) {
|
||||
return Optional.ofNullable(property.getAnnotation(StrictDeserialization.class))
|
||||
.or(() -> Optional.ofNullable(property.getContextAnnotation(StrictDeserialization.class)))
|
||||
.map(a -> Boolean.TRUE)
|
||||
.orElse(false);
|
||||
}
|
||||
|
||||
private static void serializeEscapedJson(Object value, JsonGenerator gen) throws IOException {
|
||||
final ObjectCodec codec = gen.getCodec();
|
||||
final StringWriter writer = new StringWriter();
|
||||
final JsonGenerator subGen = gen.getCodec().getFactory().createGenerator(writer);
|
||||
codec.writeValue(subGen, value);
|
||||
final String escapedJson = writer.toString();
|
||||
gen.writeString(escapedJson);
|
||||
}
|
||||
|
||||
private static Optional<List<Double>> doubleListFromCommaList(String commaList, int minItems, int maxItems,
|
||||
boolean strict) {
|
||||
final String[] splits = commaList.split(",");
|
||||
if (splits.length < minItems) {
|
||||
if (strict) {
|
||||
throw new IllegalArgumentException("expected at least " + minItems + " doubles");
|
||||
} else {
|
||||
return Optional.empty();
|
||||
}
|
||||
} else if (splits.length > 3 && strict) {
|
||||
throw new IllegalArgumentException("expected at most " + maxItems + " doubles");
|
||||
}
|
||||
return Optional.of(Arrays.stream(splits)
|
||||
.limit(maxItems)
|
||||
.map(Double::parseDouble)
|
||||
.toList());
|
||||
}
|
||||
|
||||
static class EnvelopeSerializer extends JsonSerializer<Envelope> {
|
||||
|
||||
@Override
|
||||
public void serialize(Envelope v, JsonGenerator gen, SerializerProvider provider) throws IOException {
|
||||
gen.writeString(Format.joinCoordinates(v.getMinX(), v.getMinY(), v.getMaxX(), v.getMaxY()));
|
||||
}
|
||||
}
|
||||
|
||||
static class EnvelopeDeserializer extends JsonDeserializer<Envelope> implements ContextualDeserializer {
|
||||
|
||||
private boolean strict = false;
|
||||
|
||||
@Override
|
||||
public Envelope deserialize(JsonParser p, DeserializationContext ctxt) throws IOException {
|
||||
var dsOption = doubleListFromCommaList(p.getValueAsString(), 4, 4, strict);
|
||||
if (dsOption.isEmpty()) {
|
||||
return null;
|
||||
}
|
||||
final List<Double> ds = dsOption.get();
|
||||
final double minX = ds.get(0);
|
||||
final double maxX = ds.get(2);
|
||||
final double minY = ds.get(1);
|
||||
final double maxY = ds.get(3);
|
||||
return new Envelope(minX, maxX, minY, maxY);
|
||||
}
|
||||
|
||||
@Override
|
||||
public JsonDeserializer<?> createContextual(DeserializationContext ctxt, BeanProperty property)
|
||||
throws JsonMappingException {
|
||||
|
||||
strict = isStrictDeserialization(property);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static class CoordinateSerializer extends JsonSerializer<Coordinate> {
|
||||
|
||||
@Override
|
||||
public void serialize(Coordinate v, JsonGenerator gen, SerializerProvider provider) throws IOException {
|
||||
if (Double.isNaN(v.getZ())) {
|
||||
gen.writeString(Format.joinCoordinates(v.getX(), v.getY()));
|
||||
} else {
|
||||
gen.writeString(Format.joinCoordinates(v.getX(), v.getY(), Math.ceil(v.getZ())));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static class CoordinateXYSerializer extends JsonSerializer<Coordinate> {
|
||||
@Override
|
||||
public void serialize(Coordinate v, JsonGenerator gen, SerializerProvider provider) throws IOException {
|
||||
gen.writeString(Format.joinCoordinates(v.getX(), v.getY()));
|
||||
}
|
||||
}
|
||||
|
||||
static class CoordinateDeserializer extends JsonDeserializer<Coordinate> implements ContextualDeserializer {
|
||||
|
||||
boolean strict = false;
|
||||
|
||||
@Override
|
||||
public Coordinate deserialize(JsonParser p, DeserializationContext ctxt) throws IOException {
|
||||
var dsOption = doubleListFromCommaList(p.getValueAsString(), 2, 3, strict);
|
||||
if (dsOption.isEmpty()) {
|
||||
return null;
|
||||
}
|
||||
final List<Double> ds = dsOption.get();
|
||||
if (ds.size() == 2) {
|
||||
return new CoordinateXY(ds.get(0), ds.get(1));
|
||||
} else {
|
||||
return new Coordinate(ds.get(0), ds.get(1), ds.get(2));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public JsonDeserializer<?> createContextual(DeserializationContext ctxt, BeanProperty property)
|
||||
throws JsonMappingException {
|
||||
|
||||
strict = isStrictDeserialization(property);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
static class MetadataJsonDeserializer extends JsonDeserializer<TileArchiveMetadata.TileArchiveMetadataJson> {
|
||||
@Override
|
||||
public TileArchiveMetadata.TileArchiveMetadataJson deserialize(JsonParser p, DeserializationContext ctxt)
|
||||
throws IOException {
|
||||
|
||||
try (JsonParser parser = p.getCodec().getFactory().createParser(p.getValueAsString())) {
|
||||
return parser.readValueAs(TileArchiveMetadata.TileArchiveMetadataJson.class);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static class MetadataJsonSerializer extends JsonSerializer<TileArchiveMetadata.TileArchiveMetadataJson> {
|
||||
@Override
|
||||
public void serialize(TileArchiveMetadata.TileArchiveMetadataJson value, JsonGenerator gen,
|
||||
SerializerProvider serializers) throws IOException {
|
||||
|
||||
serializeEscapedJson(value, gen);
|
||||
}
|
||||
}
|
||||
|
||||
static class VectorLayersToStringSerializer extends JsonSerializer<List<LayerAttrStats.VectorLayer>> {
|
||||
@Override
|
||||
public void serialize(List<LayerAttrStats.VectorLayer> value, JsonGenerator gen, SerializerProvider serializers)
|
||||
throws IOException {
|
||||
|
||||
serializeEscapedJson(value, gen);
|
||||
}
|
||||
}
|
||||
|
||||
static class EmptyMapIfNullDeserializer extends JsonDeserializer<Map<String, String>> {
|
||||
@SuppressWarnings("unchecked")
|
||||
@Override
|
||||
public Map<String, String> deserialize(JsonParser p, DeserializationContext ctxt) throws IOException {
|
||||
return p.readValueAs(HashMap.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, String> getNullValue(DeserializationContext ctxt) {
|
||||
return new HashMap<>();
|
||||
}
|
||||
}
|
||||
}
|
|
@ -33,6 +33,7 @@ import java.util.Map;
|
|||
import java.util.OptionalLong;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.LongSupplier;
|
||||
|
@ -149,6 +150,9 @@ public class TileArchiveWriter {
|
|||
.addBuffer("reader_queue", queueSize)
|
||||
.sinkTo("encode", processThreads, writer::tileEncoderSink);
|
||||
|
||||
// ensure to initialize the archive BEFORE starting to write any tiles
|
||||
output.initialize();
|
||||
|
||||
// the tile writer will wait on the result of each batch to ensure tiles are written in order
|
||||
WorkerPipeline<TileBatch> writeBranch = pipeline.readFromQueue(writerQueue)
|
||||
.sinkTo("write", tileWriteThreads, writer::tileWriter);
|
||||
|
@ -179,10 +183,13 @@ public class TileArchiveWriter {
|
|||
loggers.newLine()
|
||||
.add(writer::getLastTileLogDetails);
|
||||
|
||||
var doneFuture = joinFutures(
|
||||
writeBranch.done(),
|
||||
layerStatsBranch == null ? CompletableFuture.completedFuture(null) : layerStatsBranch.done(),
|
||||
encodeBranch.done());
|
||||
final CompletableFuture<Void> tileWritersFuture = writeBranch.done();
|
||||
final CompletableFuture<Void> layerStatsFuture =
|
||||
layerStatsBranch == null ? CompletableFuture.completedFuture(null) : layerStatsBranch.done();
|
||||
final CompletableFuture<Void> archiveFinisher =
|
||||
CompletableFuture.allOf(tileWritersFuture, layerStatsFuture).thenRun(writer::finishArchive);
|
||||
|
||||
var doneFuture = joinFutures(tileWritersFuture, layerStatsFuture, encodeBranch.done(), archiveFinisher);
|
||||
loggers.awaitAndLog(doneFuture, config.logInterval());
|
||||
writer.printTileStats();
|
||||
timer.stop();
|
||||
|
@ -330,11 +337,15 @@ public class TileArchiveWriter {
|
|||
}
|
||||
}
|
||||
|
||||
private final AtomicBoolean firstTileWriterTracker = new AtomicBoolean(true);
|
||||
|
||||
private void tileWriter(Iterable<TileBatch> tileBatches) throws ExecutionException, InterruptedException {
|
||||
|
||||
final boolean firstTileWriter = firstTileWriterTracker.compareAndExchange(true, false);
|
||||
|
||||
var f = NumberFormat.getNumberInstance(Locale.getDefault());
|
||||
f.setMaximumFractionDigits(5);
|
||||
|
||||
archive.initialize();
|
||||
var order = archive.tileOrder();
|
||||
|
||||
TileCoord lastTile = null;
|
||||
|
@ -350,10 +361,15 @@ public class TileArchiveWriter {
|
|||
lastTile = encodedTile.coord();
|
||||
int z = tileCoord.z();
|
||||
if (z != currentZ) {
|
||||
if (time == null) {
|
||||
LOGGER.info("Starting z{}", z);
|
||||
} else {
|
||||
LOGGER.info("Finished z{} in {}, now starting z{}", currentZ, time.stop(), z);
|
||||
// for multiple writers the starting/finish log message of the _first_ tilewriter
|
||||
// is not 100% accurate in terms of overall "zoom-progress",
|
||||
// but it should be a "good-enough" indicator for "zoom-progress"-logging
|
||||
if (firstTileWriter) {
|
||||
if (time == null) {
|
||||
LOGGER.info("Starting z{}", z);
|
||||
} else {
|
||||
LOGGER.info("Finished z{} in {}, now starting z{}", currentZ, time.stop(), z);
|
||||
}
|
||||
}
|
||||
time = Timer.start();
|
||||
currentZ = z;
|
||||
|
@ -371,8 +387,6 @@ public class TileArchiveWriter {
|
|||
if (time != null) {
|
||||
LOGGER.info("Finished z{} in {}", currentZ, time.stop());
|
||||
}
|
||||
|
||||
archive.finish(tileArchiveMetadata.withLayerStats(layerAttrStats.getTileStats()));
|
||||
}
|
||||
|
||||
@SuppressWarnings("java:S2629")
|
||||
|
@ -386,6 +400,10 @@ public class TileArchiveWriter {
|
|||
return Stream.of(tilesByZoom).mapToLong(c -> c.get()).sum();
|
||||
}
|
||||
|
||||
private void finishArchive() {
|
||||
archive.finish(tileArchiveMetadata.withLayerStats(layerAttrStats.getTileStats()));
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a hash over encoded and compressed tile.
|
||||
* <p>
|
||||
|
|
|
@ -1,6 +1,8 @@
|
|||
package com.onthegomap.planetiler.archive;
|
||||
|
||||
import com.onthegomap.planetiler.config.PlanetilerConfig;
|
||||
import com.onthegomap.planetiler.files.ReadableFilesArchive;
|
||||
import com.onthegomap.planetiler.files.WriteableFilesArchive;
|
||||
import com.onthegomap.planetiler.mbtiles.Mbtiles;
|
||||
import com.onthegomap.planetiler.pmtiles.ReadablePmtiles;
|
||||
import com.onthegomap.planetiler.pmtiles.WriteablePmtiles;
|
||||
|
@ -56,6 +58,7 @@ public class TileArchives {
|
|||
new StreamArchiveConfig(config, options));
|
||||
case JSON -> WriteableJsonStreamArchive.newWriteToFile(archive.getLocalPath(),
|
||||
new StreamArchiveConfig(config, options));
|
||||
case FILES -> WriteableFilesArchive.newWriter(archive.getLocalPath(), options, config.force() || config.append());
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -73,6 +76,7 @@ public class TileArchives {
|
|||
case CSV, TSV -> throw new UnsupportedOperationException("reading CSV is not supported");
|
||||
case PROTO, PBF -> throw new UnsupportedOperationException("reading PROTO is not supported");
|
||||
case JSON -> throw new UnsupportedOperationException("reading JSON is not supported");
|
||||
case FILES -> ReadableFilesArchive.newReader(archive.getLocalPath(), options);
|
||||
};
|
||||
}
|
||||
|
||||
|
|
|
@ -1,20 +1,25 @@
|
|||
package com.onthegomap.planetiler.archive;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonEnumDefaultValue;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.core.JsonParser;
|
||||
import com.fasterxml.jackson.databind.DeserializationContext;
|
||||
import com.fasterxml.jackson.databind.JsonDeserializer;
|
||||
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
@JsonDeserialize(using = TileCompression.Deserializer.class)
|
||||
public enum TileCompression {
|
||||
|
||||
@JsonProperty("none")
|
||||
NONE("none"),
|
||||
@JsonProperty("gzip")
|
||||
GZIP("gzip"),
|
||||
@JsonProperty("unknown") @JsonEnumDefaultValue
|
||||
@JsonProperty("unknown")
|
||||
UNKNWON("unknown");
|
||||
|
||||
private final String id;
|
||||
|
@ -43,4 +48,16 @@ public enum TileCompression {
|
|||
public String id() {
|
||||
return id;
|
||||
}
|
||||
|
||||
static class Deserializer extends JsonDeserializer<TileCompression> {
|
||||
@Override
|
||||
public TileCompression deserialize(JsonParser p, DeserializationContext ctxt) throws IOException {
|
||||
return findById(p.getValueAsString()).orElse(TileCompression.UNKNWON);
|
||||
}
|
||||
|
||||
@Override
|
||||
public TileCompression getNullValue(DeserializationContext ctxt) {
|
||||
return TileCompression.GZIP;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -44,6 +44,8 @@ public interface WriteableTileArchive extends Closeable {
|
|||
*/
|
||||
default void finish(TileArchiveMetadata tileArchiveMetadata) {}
|
||||
|
||||
long bytesWritten();
|
||||
|
||||
interface TileWriter extends Closeable {
|
||||
|
||||
void write(TileEncodingResult encodingResult);
|
||||
|
|
|
@ -20,6 +20,7 @@ import java.util.Map;
|
|||
import java.util.Properties;
|
||||
import java.util.Set;
|
||||
import java.util.TreeMap;
|
||||
import java.util.function.Function;
|
||||
import java.util.function.Supplier;
|
||||
import java.util.function.UnaryOperator;
|
||||
import java.util.regex.Pattern;
|
||||
|
@ -485,6 +486,13 @@ public class Arguments {
|
|||
return parsed;
|
||||
}
|
||||
|
||||
public <T> T getObject(String key, String description, T defaultValue, Function<String, T> converter) {
|
||||
final String serializedValue = getArg(key);
|
||||
final T value = serializedValue == null ? defaultValue : converter.apply(serializedValue);
|
||||
logArgValue(key, description, value);
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a map from all the arguments provided to their values.
|
||||
*/
|
||||
|
|
|
@ -0,0 +1,93 @@
|
|||
package com.onthegomap.planetiler.files;

import static com.onthegomap.planetiler.files.TileSchemeEncoding.X_TEMPLATE;
import static com.onthegomap.planetiler.files.TileSchemeEncoding.Y_TEMPLATE;
import static com.onthegomap.planetiler.files.TileSchemeEncoding.Z_TEMPLATE;

import com.onthegomap.planetiler.config.Arguments;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Objects;
import java.util.Optional;

/** Shared helpers for the "files" tile archive: metadata-path and tile-scheme option handling. */
public final class FilesArchiveUtils {

  static final String OPTION_METADATA_PATH = "metadata_path";
  static final String OPTION_TILE_SCHEME = "tile_scheme";

  private FilesArchiveUtils() {}

  /**
   * Resolves the metadata output path from options: empty when disabled via "none", otherwise the configured path,
   * with relative paths resolved against {@code basePath}.
   */
  static Optional<Path> metadataPath(Path basePath, Arguments options) {
    final String rawPath = options.getString(
      OPTION_METADATA_PATH,
      "path to the metadata - use \"none\" to disable",
      "metadata.json"
    );
    if ("none".equals(rawPath)) {
      return Optional.empty();
    }
    final Path candidate = Paths.get(rawPath);
    return Optional.of(candidate.isAbsolute() ? candidate : basePath.resolve(candidate));
  }

  /** Builds the {@link TileSchemeEncoding} from the (files_)tile_scheme option, falling back to the given default. */
  static TileSchemeEncoding tilesSchemeEncoding(Arguments options, Path basePath, String defaultTileScheme) {
    final String tileScheme = options.getString(
      OPTION_TILE_SCHEME,
      "the tile scheme (e.g. {z}/{x}/{y}.pbf, {x}/{y}/{z}.pbf)" +
        " - instead of {x}/{y} {xs}/{ys} can be used which splits the x/y into 2 directories each" +
        " which ensures <1000 files per directory",
      defaultTileScheme
    );
    return new TileSchemeEncoding(tileScheme, basePath);
  }

  /**
   * Splits an output path such as {@code /tiles/{z}/{x}/{y}.pbf} into the plain base directory and the embedded tile
   * scheme, defaulting the scheme to {@code {z}/{x}/{y}.pbf} when none is embedded.
   */
  static BasePathWithTileSchemeEncoding basePathWithTileSchemeEncoding(Arguments options, Path basePath) {
    final SplitShortcutPath split = SplitShortcutPath.split(basePath);
    final String tileScheme = Objects
      .requireNonNullElse(split.tileSchemePart(), Path.of(Z_TEMPLATE, X_TEMPLATE, Y_TEMPLATE + ".pbf")).toString();
    return new BasePathWithTileSchemeEncoding(
      split.basePart(),
      tilesSchemeEncoding(options, split.basePart(), tileScheme)
    );
  }

  /** Strips a tile-scheme shortcut (e.g. {@code {z}/{x}/{y}.pbf}) from the end of the given output path, if present. */
  public static Path cleanBasePath(Path basePath) {
    return SplitShortcutPath.split(basePath).basePart();
  }

  record BasePathWithTileSchemeEncoding(Path basePath, TileSchemeEncoding tileSchemeEncoding) {}

  /** Result of splitting an output path into its plain directory part and the (optional) embedded tile scheme. */
  private record SplitShortcutPath(Path basePart, Path tileSchemePart) {
    public static SplitShortcutPath split(Path basePath) {
      Path plainPart = Objects.requireNonNullElse(basePath.getRoot(), Paths.get(""));
      Path schemePart = null;

      // the first path element containing "{" starts the tile scheme; every element after it belongs to the scheme
      for (int i = 0; i < basePath.getNameCount(); i++) {
        final Path element = basePath.getName(i);
        if (schemePart == null && !element.toString().contains("{")) {
          plainPart = plainPart.resolve(element);
        } else {
          schemePart = schemePart == null ? element : schemePart.resolve(element);
        }
      }

      if (schemePart == null) {
        // just in case: use the "original" basePath in case no tile scheme is included, but plainPart _should_ be identical
        return new SplitShortcutPath(basePath, null);
      }
      return new SplitShortcutPath(plainPart, schemePart);
    }
  }
}
|
|
@ -0,0 +1,114 @@
|
|||
package com.onthegomap.planetiler.files;

import com.google.common.base.Preconditions;
import com.onthegomap.planetiler.archive.ReadableTileArchive;
import com.onthegomap.planetiler.archive.TileArchiveMetadata;
import com.onthegomap.planetiler.archive.TileArchiveMetadataDeSer;
import com.onthegomap.planetiler.config.Arguments;
import com.onthegomap.planetiler.geo.TileCoord;
import com.onthegomap.planetiler.util.CloseableIterator;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Optional;
import java.util.function.Function;
import java.util.stream.Stream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Reads tiles from a folder structure (e.g. BASEPATH/{z}/{x}{y}.pbf). Counterpart to {@link WriteableFilesArchive}.
 * <p/>
 * Supported arguments
 * <dl>
 * <dt>(files_)tile_scheme</dt>
 * <dd>The tile scheme e.g. {x}/{y}/{z}.pbf. The default is {z}/{x}/{y}.pbf. See {@link TileSchemeEncoding} for more
 * details.</dd>
 * <dt>(files_)metadata_path</dt>
 * <dd>The path the meta data should be written to. The default is BASEPATH/metadata.json. "none" can be used to
 * suppress writing metadata.</dd>
 * </dl>
 *
 * @see WriteableFilesArchive
 * @see TileSchemeEncoding
 */
public class ReadableFilesArchive implements ReadableTileArchive {

  private static final Logger LOGGER = LoggerFactory.getLogger(ReadableFilesArchive.class);

  private final Path basePath;
  private final Path metadataPath;
  private final Function<TileCoord, Path> tileSchemeEncoder;
  private final Function<Path, Optional<TileCoord>> tileSchemeDecoder;

  // maximum directory depth Files#find needs to descend for the configured tile scheme
  private final int searchDepth;

  private ReadableFilesArchive(Path basePath, Arguments options) {
    final var pathAndScheme = FilesArchiveUtils.basePathWithTileSchemeEncoding(options, basePath);
    final Path resolvedBase = pathAndScheme.basePath();

    LOGGER.atInfo().log(() -> "using " + pathAndScheme.basePath() + " as base files archive path");

    this.basePath = resolvedBase;
    Preconditions.checkArgument(
      Files.isDirectory(resolvedBase),
      "require \"" + resolvedBase + "\" to be an existing directory"
    );
    this.metadataPath = FilesArchiveUtils.metadataPath(resolvedBase, options).orElse(null);
    final TileSchemeEncoding tileSchemeEncoding = pathAndScheme.tileSchemeEncoding();
    this.tileSchemeEncoder = tileSchemeEncoding.encoder();
    this.tileSchemeDecoder = tileSchemeEncoding.decoder();
    this.searchDepth = tileSchemeEncoding.searchDepth();
  }

  /** Opens a files archive for reading; the base path must point at an existing directory. */
  public static ReadableFilesArchive newReader(Path basePath, Arguments options) {
    return new ReadableFilesArchive(basePath, options);
  }

  @Override
  @SuppressWarnings("java:S1168") // returning null is in sync with other implementations: mbtiles and pmtiles
  public byte[] getTile(int x, int y, int z) {
    final Path tileFile = tileSchemeEncoder.apply(TileCoord.ofXYZ(x, y, z));
    if (!Files.exists(tileFile)) {
      return null;
    }
    try {
      return Files.readAllBytes(tileFile);
    } catch (IOException e) {
      throw new UncheckedIOException(e);
    }
  }

  @Override
  public CloseableIterator<TileCoord> getAllTileCoords() {
    try {
      // walk the tree up to searchDepth and keep only files whose paths decode to a tile coordinate
      final Stream<TileCoord> coords = Files.find(basePath, searchDepth, (p, a) -> a.isRegularFile())
        .map(tileSchemeDecoder)
        .flatMap(Optional::stream);
      return CloseableIterator.of(coords);
    } catch (IOException e) {
      throw new UncheckedIOException(e);
    }
  }

  @Override
  public TileArchiveMetadata metadata() {
    // metadata is optional for this archive type: null when disabled or when no file was written
    if (metadataPath != null && Files.exists(metadataPath)) {
      try (InputStream is = Files.newInputStream(metadataPath)) {
        return TileArchiveMetadataDeSer.mbtilesMapper().readValue(is, TileArchiveMetadata.class);
      } catch (IOException e) {
        throw new UncheckedIOException(e);
      }
    }
    return null;
  }

  @Override
  public void close() {
    // nothing to do here
  }
}
|
|
@ -0,0 +1,183 @@
|
|||
package com.onthegomap.planetiler.files;

import com.onthegomap.planetiler.geo.TileCoord;
import com.onthegomap.planetiler.geo.TileOrder;
import java.io.File;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;

/**
 * Tile scheme encoding i.e. encoding and decoding of tile coordinates to a relative path.
 * <p/>
 * The tile scheme is a template string that supports the following templates: {x}, {y}, {z}, {xs}, {ys}. {xs} and {ys}
 * are "safe" options that split the x/y coordinate into two folders, ensuring that each folder has less than 1000
 * children.
 * <table>
 * <tr>
 * <th>Tile Scheme</th>
 * <th>Example Path</th>
 * </tr>
 * <tr>
 * <td>{z}/{x}/{y}.pbf</td>
 * <td>3/1/2.pbf</td>
 * </tr>
 * <tr>
 * <td>{x}/{y}/{z}.pbf</td>
 * <td>1/2/3.pbf</td>
 * </tr>
 * <tr>
 * <td>{x}-{y}-{z}.pbf</td>
 * <td>1-2-3.pbf</td>
 * </tr>
 * <tr>
 * <td>{x}/a/{y}/b{z}.pbf</td>
 * <td>1/a/2/b3.pbf</td>
 * </tr>
 * <tr>
 * <td>{z}/{x}/{y}.pbf.gz</td>
 * <td>3/1/2.pbf.gz</td>
 * </tr>
 * <tr>
 * <td>{z}/{xs}/{ys}.pbf</td>
 * <td>3/000/001/000/002.pbf</td>
 * </tr>
 * <tr>
 * <td>{z}/{x}/{ys}.pbf</td>
 * <td>3/1/000/002.pbf</td>
 * </tr>
 * <tr>
 * <td>{z}/{xs}/{y}.pbf</td>
 * <td>3/000/001/2.pbf</td>
 * </tr>
 * </table>
 */
public class TileSchemeEncoding {

  static final String X_TEMPLATE = "{x}";
  static final String X_SAFE_TEMPLATE = "{xs}";
  static final String Y_TEMPLATE = "{y}";
  static final String Y_SAFE_TEMPLATE = "{ys}";
  static final String Z_TEMPLATE = "{z}";

  private final String tileScheme;
  private final Path basePath;

  /**
   * @param tileScheme the tile scheme to use e.g. {z}/{x}/{y}.pbf
   * @param basePath   the base path to append the generated relative tile path to
   */
  public TileSchemeEncoding(String tileScheme, Path basePath) {
    this.tileScheme = validate(tileScheme);
    this.basePath = basePath;
  }

  /** Returns a function that turns a tile coordinate into its absolute file path according to the scheme. */
  public Function<TileCoord, Path> encoder() {
    final boolean splitX = tileScheme.contains(X_SAFE_TEMPLATE);
    final boolean splitY = tileScheme.contains(Y_SAFE_TEMPLATE);
    return tileCoord -> {
      String relative = tileScheme.replace(Z_TEMPLATE, Integer.toString(tileCoord.z()));

      relative = splitX ?
        relative.replace(X_SAFE_TEMPLATE, twoLevelDir(tileCoord.x())) :
        relative.replace(X_TEMPLATE, Integer.toString(tileCoord.x()));

      relative = splitY ?
        relative.replace(Y_SAFE_TEMPLATE, twoLevelDir(tileCoord.y())) :
        relative.replace(Y_TEMPLATE, Integer.toString(tileCoord.y()));

      return basePath.resolve(Paths.get(relative));
    };
  }

  /** Zero-pads the coordinate to six digits and splits it into two three-digit path elements (<1000 children each). */
  private static String twoLevelDir(int coordinate) {
    final String padded = String.format("%06d", coordinate);
    return Paths.get(padded.substring(0, 3), padded.substring(3)).toString();
  }

  /** Returns a function that parses an absolute file path back into a tile coordinate, empty when it doesn't match. */
  Function<Path, Optional<TileCoord>> decoder() {
    final String absoluteScheme = basePath.resolve(tileScheme).toAbsolutePath().toString();

    @SuppressWarnings("java:S1075") final String escapedPathSeparator = "\\" + File.separator;

    // quote the whole scheme, then punch named capture groups into it for each template placeholder
    final Pattern pathPattern = Pattern.compile(
      Pattern.quote(absoluteScheme)
        .replace(X_TEMPLATE, "\\E(?<x>\\d+)\\Q")
        .replace(Y_TEMPLATE, "\\E(?<y>\\d+)\\Q")
        .replace(Z_TEMPLATE, "\\E(?<z>\\d+)\\Q")
        .replace(X_SAFE_TEMPLATE, "\\E(?<x0>\\d+)" + escapedPathSeparator + "(?<x1>\\d+)\\Q")
        .replace(Y_SAFE_TEMPLATE, "\\E(?<y0>\\d+)" + escapedPathSeparator + "(?<y1>\\d+)\\Q")
    );

    final boolean splitX = tileScheme.contains(X_SAFE_TEMPLATE);
    final boolean splitY = tileScheme.contains(Y_SAFE_TEMPLATE);

    return path -> {
      final Matcher matcher = pathPattern.matcher(path.toAbsolutePath().toString());
      if (!matcher.matches()) {
        return Optional.empty();
      }
      // safe templates capture the coordinate in two groups that are simply concatenated back together
      final int x = splitX ? Integer.parseInt(matcher.group("x0") + matcher.group("x1")) :
        Integer.parseInt(matcher.group("x"));
      final int y = splitY ? Integer.parseInt(matcher.group("y0") + matcher.group("y1")) :
        Integer.parseInt(matcher.group("y"));
      final int z = Integer.parseInt(matcher.group("z"));

      return Optional.of(TileCoord.ofXYZ(x, y, z));
    };
  }

  /** Returns the maximum directory depth a file matching the scheme can live at (safe templates add one level each). */
  int searchDepth() {
    return Paths.get(tileScheme).getNameCount() +
      StringUtils.countMatches(tileScheme, X_SAFE_TEMPLATE) +
      StringUtils.countMatches(tileScheme, Y_SAFE_TEMPLATE);
  }

  TileOrder preferredTileOrder() {
    // there's only TMS currently - but once there are more, this can be changed according to the scheme
    return TileOrder.TMS;
  }

  /** Rejects absolute schemes, regex quote markers, and any scheme not containing exactly one x, y, and z template. */
  private static String validate(String tileScheme) {
    if (Paths.get(tileScheme).isAbsolute()) {
      throw new IllegalArgumentException("tile scheme is not allowed to be absolute");
    }
    final int zCount = StringUtils.countMatches(tileScheme, Z_TEMPLATE);
    final int xCount =
      StringUtils.countMatches(tileScheme, X_TEMPLATE) + StringUtils.countMatches(tileScheme, X_SAFE_TEMPLATE);
    final int yCount =
      StringUtils.countMatches(tileScheme, Y_TEMPLATE) + StringUtils.countMatches(tileScheme, Y_SAFE_TEMPLATE);
    if (zCount != 1 || xCount != 1 || yCount != 1) {
      throw new IllegalArgumentException(
        "tile scheme must contain ('%s' OR '%s') AND ('%s' OR '%s' ) AND '%s'"
          .formatted(X_TEMPLATE, X_SAFE_TEMPLATE, Y_TEMPLATE, Y_SAFE_TEMPLATE, Z_TEMPLATE));
    }
    if (tileScheme.contains("\\E") || tileScheme.contains("\\Q")) {
      // the decoder builds a regex around the scheme with Pattern#quote => quote markers would break it
      throw new IllegalArgumentException("regex quotes are not allowed");
    }
    return tileScheme;
  }

  @Override
  public boolean equals(Object o) {
    return this == o || (o instanceof TileSchemeEncoding that && Objects.equals(tileScheme, that.tileScheme) &&
      Objects.equals(basePath, that.basePath));
  }

  @Override
  public int hashCode() {
    return Objects.hash(tileScheme, basePath);
  }

  @Override
  public String toString() {
    return "TileSchemeEncoding[" +
      "tileScheme='" + tileScheme + '\'' +
      ", basePath=" + basePath +
      ']';
  }
}
|
|
@ -0,0 +1,175 @@
|
|||
package com.onthegomap.planetiler.files;

import com.google.common.base.Preconditions;
import com.onthegomap.planetiler.archive.TileArchiveMetadata;
import com.onthegomap.planetiler.archive.TileArchiveMetadataDeSer;
import com.onthegomap.planetiler.archive.TileEncodingResult;
import com.onthegomap.planetiler.archive.WriteableTileArchive;
import com.onthegomap.planetiler.config.Arguments;
import com.onthegomap.planetiler.geo.TileCoord;
import com.onthegomap.planetiler.geo.TileOrder;
import com.onthegomap.planetiler.stats.Counter;
import com.onthegomap.planetiler.util.CountingOutputStream;
import com.onthegomap.planetiler.util.FileUtils;
import java.io.IOException;
import java.io.OutputStream;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.function.Function;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Writes tiles as separate files. The default tile scheme is z/x/y.pbf.
 * <p/>
 * Supported arguments
 * <dl>
 * <dt>(files_)tile_scheme</dt>
 * <dd>The tile scheme e.g. {x}/{y}/{z}.pbf. The default is {z}/{x}/{y}.pbf. See {@link TileSchemeEncoding} for more
 * details.</dd>
 * <dt>(files_)metadata_path</dt>
 * <dd>The path the meta data should be written to. The default is BASEPATH/metadata.json. "none" can be used to
 * suppress writing metadata.</dd>
 * </dl>
 *
 * Usages:
 *
 * <pre>
 * --output=/path/to/tiles/ --files_tile_scheme={z}/{x}/{y}.pbf --files_metadata_path=/some/other/path/metadata.json
 * --output=/path/to/tiles/{z}/{x}/{y}.pbf
 * --output=/path/to/tiles?format=files&tile_scheme={z}/{x}/{y}.pbf
 * </pre>
 *
 * @see ReadableFilesArchive
 * @see TileSchemeEncoding
 */
public class WriteableFilesArchive implements WriteableTileArchive {

  private static final Logger LOGGER = LoggerFactory.getLogger(WriteableFilesArchive.class);

  // per-thread counters summed on read so concurrent tile writers don't contend on one counter
  private final Counter.MultiThreadCounter bytesWritten = Counter.newMultiThreadCounter();

  private final Path basePath;
  private final Path metadataPath;

  private final Function<TileCoord, Path> tileSchemeEncoder;

  private final TileOrder tileOrder;

  private WriteableFilesArchive(Path basePath, Arguments options, boolean overwriteMetadata) {
    final var pathAndScheme = FilesArchiveUtils.basePathWithTileSchemeEncoding(options, basePath);
    final Path resolvedBase = pathAndScheme.basePath();

    LOGGER.atInfo().log("using {} as base files archive path", resolvedBase);

    this.basePath = createValidateDirectory(resolvedBase);
    // NOTE(review): the metadata path is resolved twice (the second time against the parent of the first result);
    // for the default "metadata.json" both resolutions yield the same path - confirm this is intended for nested paths
    this.metadataPath = FilesArchiveUtils.metadataPath(resolvedBase, options)
      .flatMap(p -> FilesArchiveUtils.metadataPath(p.getParent(), options))
      .orElse(null);
    if (this.metadataPath != null && Files.exists(this.metadataPath)) {
      if (!overwriteMetadata) {
        throw new IllegalArgumentException(this.metadataPath + " already exists");
      } else if (!Files.isRegularFile(this.metadataPath)) {
        throw new IllegalArgumentException("require " + this.metadataPath + " to be a regular file");
      }
    }
    final TileSchemeEncoding tileSchemeEncoding = pathAndScheme.tileSchemeEncoding();
    this.tileSchemeEncoder = tileSchemeEncoding.encoder();
    this.tileOrder = tileSchemeEncoding.preferredTileOrder();
  }

  /** Opens a files archive for writing, creating the base directory if needed. */
  public static WriteableFilesArchive newWriter(Path basePath, Arguments options, boolean overwriteMetadata) {
    return new WriteableFilesArchive(basePath, options, overwriteMetadata);
  }

  @Override
  public boolean deduplicates() {
    // every tile is written to its own file - no shared storage to deduplicate into
    return false;
  }

  @Override
  public TileOrder tileOrder() {
    return tileOrder;
  }

  @Override
  public TileWriter newTileWriter() {
    return new TileFilesWriter(basePath, tileSchemeEncoder, bytesWritten.counterForThread());
  }

  @Override
  public void finish(TileArchiveMetadata tileArchiveMetadata) {
    if (metadataPath == null) {
      return;
    }
    // count the metadata bytes toward the archive size as well
    try (OutputStream s = new CountingOutputStream(Files.newOutputStream(metadataPath), bytesWritten::incBy)) {
      TileArchiveMetadataDeSer.mbtilesMapper().writeValue(s, tileArchiveMetadata);
    } catch (IOException e) {
      throw new UncheckedIOException(e);
    }
  }

  @Override
  public long bytesWritten() {
    return bytesWritten.get();
  }

  @Override
  public void close() throws IOException {
    // nothing to do here
  }

  /** Ensures {@code p} exists and is a directory, creating it when missing. */
  private static Path createValidateDirectory(Path p) {
    if (!Files.exists(p)) {
      FileUtils.createDirectory(p);
    }
    Preconditions.checkArgument(
      Files.isDirectory(p),
      "require \"" + p + "\" to be a directory"
    );
    return p;
  }

  private static class TileFilesWriter implements TileWriter {

    private final Function<TileCoord, Path> tileSchemeEncoder;
    private final Counter bytesWritten;
    // last folder we know exists - avoids one exists-check (I/O) per tile for z/x/y-style schemes
    private Path lastCheckedFolder;

    TileFilesWriter(Path basePath, Function<TileCoord, Path> tileSchemeEncoder, Counter bytesWritten) {
      this.tileSchemeEncoder = tileSchemeEncoder;
      this.lastCheckedFolder = basePath;
      this.bytesWritten = bytesWritten;
    }

    @Override
    public final void write(TileEncodingResult encodingResult) {
      final byte[] data = encodingResult.tileData();
      final Path tileFile = tileSchemeEncoder.apply(encodingResult.coord());
      final Path folder = tileFile.getParent();

      // tiny optimization in order to avoid too many unnecessary "folder-exists-checks" (I/O)
      // only effective when the tileScheme is z/x/y but doesn't really harm otherwise
      if (!lastCheckedFolder.equals(folder) && !Files.exists(folder)) {
        FileUtils.createDirectory(folder);
      }
      lastCheckedFolder = folder;

      try {
        Files.write(tileFile, data);
      } catch (IOException e) {
        throw new UncheckedIOException(e);
      }

      bytesWritten.incBy(data.length);
    }

    @Override
    public void close() {
      // nothing to do here
    }
  }
}
|
|
@ -1,17 +1,11 @@
|
|||
package com.onthegomap.planetiler.mbtiles;
|
||||
|
||||
import static com.fasterxml.jackson.annotation.JsonInclude.Include.NON_ABSENT;
|
||||
import static com.onthegomap.planetiler.util.Format.joinCoordinates;
|
||||
|
||||
import com.carrotsearch.hppc.LongIntHashMap;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;
|
||||
import com.fasterxml.jackson.core.type.TypeReference;
|
||||
import com.onthegomap.planetiler.archive.ReadableTileArchive;
|
||||
import com.onthegomap.planetiler.archive.Tile;
|
||||
import com.onthegomap.planetiler.archive.TileArchiveMetadata;
|
||||
import com.onthegomap.planetiler.archive.TileCompression;
|
||||
import com.onthegomap.planetiler.archive.TileArchiveMetadataDeSer;
|
||||
import com.onthegomap.planetiler.archive.TileEncodingResult;
|
||||
import com.onthegomap.planetiler.archive.WriteableTileArchive;
|
||||
import com.onthegomap.planetiler.config.Arguments;
|
||||
|
@ -19,9 +13,8 @@ import com.onthegomap.planetiler.geo.TileCoord;
|
|||
import com.onthegomap.planetiler.geo.TileOrder;
|
||||
import com.onthegomap.planetiler.reader.FileFormatException;
|
||||
import com.onthegomap.planetiler.util.CloseableIterator;
|
||||
import com.onthegomap.planetiler.util.FileUtils;
|
||||
import com.onthegomap.planetiler.util.Format;
|
||||
import com.onthegomap.planetiler.util.LayerAttrStats;
|
||||
import com.onthegomap.planetiler.util.Parse;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.sql.Connection;
|
||||
|
@ -34,17 +27,15 @@ import java.util.ArrayList;
|
|||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.NoSuchElementException;
|
||||
import java.util.Objects;
|
||||
import java.util.OptionalLong;
|
||||
import java.util.TreeMap;
|
||||
import java.util.function.LongSupplier;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.IntStream;
|
||||
import org.locationtech.jts.geom.CoordinateXY;
|
||||
import org.locationtech.jts.geom.Envelope;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.sqlite.SQLiteConfig;
|
||||
|
@ -91,9 +82,6 @@ public final class Mbtiles implements WriteableTileArchive, ReadableTileArchive
|
|||
private static final String METADATA_COL_VALUE = "value";
|
||||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger(Mbtiles.class);
|
||||
private static final ObjectMapper objectMapper = new ObjectMapper()
|
||||
.registerModules(new Jdk8Module())
|
||||
.setSerializationInclusion(NON_ABSENT);
|
||||
|
||||
// load the sqlite driver
|
||||
static {
|
||||
|
@ -110,7 +98,9 @@ public final class Mbtiles implements WriteableTileArchive, ReadableTileArchive
|
|||
private final boolean vacuumAnalyze;
|
||||
private PreparedStatement getTileStatement = null;
|
||||
|
||||
private Mbtiles(Connection connection, Arguments arguments) {
|
||||
private final LongSupplier bytesWritten;
|
||||
|
||||
private Mbtiles(Connection connection, Arguments arguments, LongSupplier bytesWritten) {
|
||||
this.connection = connection;
|
||||
this.compactDb = arguments.getBoolean(
|
||||
COMPACT_DB + "|" + LEGACY_COMPACT_DB,
|
||||
|
@ -127,6 +117,7 @@ public final class Mbtiles implements WriteableTileArchive, ReadableTileArchive
|
|||
"mbtiles: vacuum analyze sqlite DB after writing",
|
||||
false
|
||||
);
|
||||
this.bytesWritten = bytesWritten;
|
||||
}
|
||||
|
||||
/** Returns a new mbtiles file that won't get written to disk. Useful for toy use-cases like unit tests. */
|
||||
|
@ -138,7 +129,7 @@ public final class Mbtiles implements WriteableTileArchive, ReadableTileArchive
|
|||
public static Mbtiles newInMemoryDatabase(Arguments options) {
|
||||
SQLiteConfig config = new SQLiteConfig();
|
||||
config.setApplicationId(MBTILES_APPLICATION_ID);
|
||||
return new Mbtiles(newConnection("jdbc:sqlite::memory:", config, options), options);
|
||||
return new Mbtiles(newConnection("jdbc:sqlite::memory:", config, options), options, () -> 0);
|
||||
}
|
||||
|
||||
/** Alias for {@link #newInMemoryDatabase(boolean)} */
|
||||
|
@ -160,7 +151,7 @@ public final class Mbtiles implements WriteableTileArchive, ReadableTileArchive
|
|||
sqliteConfig.setTempStore(SQLiteConfig.TempStore.MEMORY);
|
||||
sqliteConfig.setApplicationId(MBTILES_APPLICATION_ID);
|
||||
var connection = newConnection("jdbc:sqlite:" + path.toAbsolutePath(), sqliteConfig, options);
|
||||
return new Mbtiles(connection, options);
|
||||
return new Mbtiles(connection, options, () -> FileUtils.size(path));
|
||||
}
|
||||
|
||||
/** Returns a new connection to an mbtiles file optimized for reads. */
|
||||
|
@ -182,7 +173,7 @@ public final class Mbtiles implements WriteableTileArchive, ReadableTileArchive
|
|||
// helps with 3 or more threads concurrently accessing:
|
||||
// config.setOpenMode(SQLiteOpenMode.NOMUTEX);
|
||||
Connection connection = newConnection("jdbc:sqlite:" + path.toAbsolutePath(), config, options);
|
||||
return new Mbtiles(connection, options);
|
||||
return new Mbtiles(connection, options, () -> 0);
|
||||
}
|
||||
|
||||
private static Connection newConnection(String url, SQLiteConfig defaults, Arguments args) {
|
||||
|
@ -240,6 +231,11 @@ public final class Mbtiles implements WriteableTileArchive, ReadableTileArchive
|
|||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public long bytesWritten() {
|
||||
return bytesWritten.getAsLong();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
try {
|
||||
|
@ -471,33 +467,8 @@ public final class Mbtiles implements WriteableTileArchive, ReadableTileArchive
|
|||
* @see <a href="https://github.com/mapbox/mbtiles-spec/blob/master/1.3/spec.md#vector-tileset-metadata">MBtiles
|
||||
* schema</a>
|
||||
*/
|
||||
// TODO add tilestats
|
||||
public record MetadataJson(
|
||||
@JsonProperty("vector_layers") List<LayerAttrStats.VectorLayer> vectorLayers
|
||||
) {
|
||||
|
||||
public MetadataJson(LayerAttrStats.VectorLayer... layers) {
|
||||
this(List.of(layers));
|
||||
}
|
||||
|
||||
public static MetadataJson fromJson(String json) {
|
||||
try {
|
||||
return json == null ? null : objectMapper.readValue(json, MetadataJson.class);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new IllegalStateException("Invalid metadata json: " + json, e);
|
||||
}
|
||||
}
|
||||
|
||||
public String toJson() {
|
||||
try {
|
||||
return objectMapper.writeValueAsString(this);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new IllegalArgumentException("Unable to encode as string: " + this, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** Contents of a row of the tiles_shallow table. */
|
||||
/** Contents of a row of the tiles_shallow table. */
|
||||
private record TileShallowEntry(TileCoord coord, int tileDataId) {}
|
||||
|
||||
/** Contents of a row of the tiles_data table. */
|
||||
|
@ -817,19 +788,18 @@ public final class Mbtiles implements WriteableTileArchive, ReadableTileArchive
|
|||
public class Metadata {
|
||||
|
||||
/** Inserts a row into the metadata table that sets {@code name=value}. */
|
||||
public Metadata setMetadata(String name, Object value) {
|
||||
public Metadata setMetadata(String name, String value) {
|
||||
if (value != null) {
|
||||
String stringValue = value.toString();
|
||||
LOGGER.debug("Set mbtiles metadata: {}={}", name,
|
||||
stringValue.length() > 1_000 ?
|
||||
(stringValue.substring(0, 1_000) + "... " + (stringValue.length() - 1_000) + " more characters") :
|
||||
stringValue);
|
||||
value.length() > 1_000 ?
|
||||
(value.substring(0, 1_000) + "... " + (value.length() - 1_000) + " more characters") :
|
||||
value);
|
||||
try (
|
||||
PreparedStatement statement = connection.prepareStatement(
|
||||
"INSERT INTO " + METADATA_TABLE + " (" + METADATA_COL_NAME + "," + METADATA_COL_VALUE + ") VALUES(?, ?);")
|
||||
) {
|
||||
statement.setString(1, name);
|
||||
statement.setString(2, stringValue);
|
||||
statement.setString(2, value);
|
||||
statement.execute();
|
||||
} catch (SQLException throwables) {
|
||||
LOGGER.error("Error setting metadata " + name + "=" + value, throwables);
|
||||
|
@ -838,14 +808,6 @@ public final class Mbtiles implements WriteableTileArchive, ReadableTileArchive
|
|||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Inserts a row into the metadata table that sets the value for {@code "json"} key to {@code value} serialized as a
|
||||
* string.
|
||||
*/
|
||||
public Metadata setJson(MetadataJson value) {
|
||||
return value == null ? this : setMetadata("json", value.toJson());
|
||||
}
|
||||
|
||||
/** Returns all key-value pairs from the metadata table. */
|
||||
public Map<String, String> getAll() {
|
||||
TreeMap<String, String> result = new TreeMap<>();
|
||||
|
@ -874,41 +836,9 @@ public final class Mbtiles implements WriteableTileArchive, ReadableTileArchive
|
|||
* specification</a>
|
||||
*/
|
||||
public Metadata set(TileArchiveMetadata tileArchiveMetadata) {
|
||||
|
||||
final TileCompression tileCompression = tileArchiveMetadata.tileCompression();
|
||||
if (tileCompression != null && tileCompression != TileCompression.GZIP) {
|
||||
LOGGER.warn("will use {} for tile compression, but the mbtiles specification actually requires gzip",
|
||||
tileCompression.id());
|
||||
}
|
||||
|
||||
var map = new LinkedHashMap<>(tileArchiveMetadata.toMap());
|
||||
|
||||
setMetadata(TileArchiveMetadata.FORMAT_KEY, tileArchiveMetadata.format());
|
||||
var center = tileArchiveMetadata.center();
|
||||
var zoom = tileArchiveMetadata.zoom();
|
||||
if (center != null) {
|
||||
if (zoom != null) {
|
||||
setMetadata(TileArchiveMetadata.CENTER_KEY, joinCoordinates(center.x, center.y, Math.ceil(zoom)));
|
||||
} else {
|
||||
setMetadata(TileArchiveMetadata.CENTER_KEY, joinCoordinates(center.x, center.y));
|
||||
}
|
||||
}
|
||||
var bounds = tileArchiveMetadata.bounds();
|
||||
if (bounds != null) {
|
||||
setMetadata(TileArchiveMetadata.BOUNDS_KEY,
|
||||
joinCoordinates(bounds.getMinX(), bounds.getMinY(), bounds.getMaxX(), bounds.getMaxY()));
|
||||
}
|
||||
setJson(new MetadataJson(tileArchiveMetadata.vectorLayers()));
|
||||
|
||||
map.remove(TileArchiveMetadata.FORMAT_KEY);
|
||||
map.remove(TileArchiveMetadata.CENTER_KEY);
|
||||
map.remove(TileArchiveMetadata.ZOOM_KEY);
|
||||
map.remove(TileArchiveMetadata.BOUNDS_KEY);
|
||||
map.remove(TileArchiveMetadata.VECTOR_LAYERS_KEY);
|
||||
|
||||
for (var entry : map.entrySet()) {
|
||||
setMetadata(entry.getKey(), entry.getValue());
|
||||
}
|
||||
TileArchiveMetadataDeSer.mbtilesMapper()
|
||||
.convertValue(tileArchiveMetadata, new TypeReference<Map<String, String>>() {})
|
||||
.forEach(this::setMetadata);
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -917,46 +847,7 @@ public final class Mbtiles implements WriteableTileArchive, ReadableTileArchive
|
|||
*/
|
||||
public TileArchiveMetadata get() {
|
||||
Map<String, String> map = new HashMap<>(getAll());
|
||||
String[] bounds = map.containsKey(TileArchiveMetadata.BOUNDS_KEY) ?
|
||||
map.remove(TileArchiveMetadata.BOUNDS_KEY).split(",") : null;
|
||||
String[] center = map.containsKey(TileArchiveMetadata.CENTER_KEY) ?
|
||||
map.remove(TileArchiveMetadata.CENTER_KEY).split(",") : null;
|
||||
var metadataJson = MetadataJson.fromJson(map.remove("json"));
|
||||
|
||||
|
||||
String tileCompressionRaw = map.remove(TileArchiveMetadata.COMPRESSION_KEY);
|
||||
TileCompression tileCompression = tileCompressionRaw == null ? TileCompression.GZIP :
|
||||
TileCompression.findById(tileCompressionRaw).orElseGet(() -> {
|
||||
LOGGER.warn("unknown tile compression {}", tileCompressionRaw);
|
||||
return TileCompression.UNKNWON;
|
||||
});
|
||||
|
||||
|
||||
return new TileArchiveMetadata(
|
||||
map.remove(TileArchiveMetadata.NAME_KEY),
|
||||
map.remove(TileArchiveMetadata.DESCRIPTION_KEY),
|
||||
map.remove(TileArchiveMetadata.ATTRIBUTION_KEY),
|
||||
map.remove(TileArchiveMetadata.VERSION_KEY),
|
||||
map.remove(TileArchiveMetadata.TYPE_KEY),
|
||||
map.remove(TileArchiveMetadata.FORMAT_KEY),
|
||||
bounds == null || bounds.length < 4 ? null : new Envelope(
|
||||
Double.parseDouble(bounds[0]),
|
||||
Double.parseDouble(bounds[2]),
|
||||
Double.parseDouble(bounds[1]),
|
||||
Double.parseDouble(bounds[3])
|
||||
),
|
||||
center == null || center.length < 2 ? null : new CoordinateXY(
|
||||
Double.parseDouble(center[0]),
|
||||
Double.parseDouble(center[1])
|
||||
),
|
||||
center == null || center.length < 3 ? null : Double.parseDouble(center[2]),
|
||||
Parse.parseIntOrNull(map.remove(TileArchiveMetadata.MINZOOM_KEY)),
|
||||
Parse.parseIntOrNull(map.remove(TileArchiveMetadata.MAXZOOM_KEY)),
|
||||
metadataJson == null ? null : metadataJson.vectorLayers,
|
||||
// any left-overs:
|
||||
map,
|
||||
tileCompression
|
||||
);
|
||||
return TileArchiveMetadataDeSer.mbtilesMapper().convertValue(map, TileArchiveMetadata.class);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -18,6 +18,7 @@ import java.util.LinkedHashMap;
|
|||
import java.util.List;
|
||||
import java.util.stream.IntStream;
|
||||
import java.util.stream.Stream;
|
||||
import org.locationtech.jts.geom.Coordinate;
|
||||
|
||||
public class ReadablePmtiles implements ReadableTileArchive {
|
||||
private final SeekableByteChannel channel;
|
||||
|
@ -139,11 +140,14 @@ public class ReadablePmtiles implements ReadableTileArchive {
|
|||
map.remove(TileArchiveMetadata.TYPE_KEY),
|
||||
format,
|
||||
header.bounds(),
|
||||
header.center(),
|
||||
(double) header.centerZoom(),
|
||||
new Coordinate(
|
||||
header.center().getX(),
|
||||
header.center().getY(),
|
||||
header.centerZoom()
|
||||
),
|
||||
(int) header.minZoom(),
|
||||
(int) header.maxZoom(),
|
||||
jsonMetadata.vectorLayers(),
|
||||
TileArchiveMetadata.TileArchiveMetadataJson.create(jsonMetadata.vectorLayers()),
|
||||
map,
|
||||
tileCompression
|
||||
);
|
||||
|
|
|
@ -10,6 +10,7 @@ import com.onthegomap.planetiler.config.PlanetilerConfig;
|
|||
import com.onthegomap.planetiler.geo.GeoUtils;
|
||||
import com.onthegomap.planetiler.geo.TileCoord;
|
||||
import com.onthegomap.planetiler.geo.TileOrder;
|
||||
import com.onthegomap.planetiler.util.FileUtils;
|
||||
import com.onthegomap.planetiler.util.Format;
|
||||
import com.onthegomap.planetiler.util.Gzip;
|
||||
import com.onthegomap.planetiler.util.SeekableInMemoryByteChannel;
|
||||
|
@ -27,6 +28,7 @@ import java.util.LinkedHashMap;
|
|||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.OptionalLong;
|
||||
import java.util.function.LongSupplier;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
|
@ -47,9 +49,12 @@ public final class WriteablePmtiles implements WriteableTileArchive {
|
|||
private long numAddressedTiles = 0;
|
||||
private boolean isClustered = true;
|
||||
|
||||
private WriteablePmtiles(SeekableByteChannel channel) throws IOException {
|
||||
private final LongSupplier bytesWritten;
|
||||
|
||||
private WriteablePmtiles(SeekableByteChannel channel, LongSupplier bytesWritten) throws IOException {
|
||||
this.out = channel;
|
||||
out.write(ByteBuffer.allocate(INIT_SECTION));
|
||||
this.bytesWritten = bytesWritten;
|
||||
}
|
||||
|
||||
private static Directories makeDirectoriesWithLeaves(List<Pmtiles.Entry> subEntries, int leafSize, int attemptNum)
|
||||
|
@ -114,11 +119,13 @@ public final class WriteablePmtiles implements WriteableTileArchive {
|
|||
|
||||
public static WriteablePmtiles newWriteToFile(Path path) throws IOException {
|
||||
return new WriteablePmtiles(
|
||||
FileChannel.open(path, StandardOpenOption.CREATE, StandardOpenOption.READ, StandardOpenOption.WRITE));
|
||||
FileChannel.open(path, StandardOpenOption.CREATE, StandardOpenOption.READ, StandardOpenOption.WRITE),
|
||||
() -> FileUtils.size(path)
|
||||
);
|
||||
}
|
||||
|
||||
public static WriteablePmtiles newWriteToMemory(SeekableInMemoryByteChannel bytes) throws IOException {
|
||||
return new WriteablePmtiles(bytes);
|
||||
return new WriteablePmtiles(bytes, () -> 0);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -238,6 +245,11 @@ public final class WriteablePmtiles implements WriteableTileArchive {
|
|||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public long bytesWritten() {
|
||||
return bytesWritten.getAsLong();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
out.close();
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
package com.onthegomap.planetiler.stats;
|
||||
|
||||
import com.onthegomap.planetiler.util.FileUtils;
|
||||
import com.onthegomap.planetiler.util.MemoryEstimator;
|
||||
import io.prometheus.client.Collector;
|
||||
import io.prometheus.client.CollectorRegistry;
|
||||
|
@ -49,7 +48,7 @@ class PrometheusStats implements Stats {
|
|||
private PushGateway pg;
|
||||
private ScheduledExecutorService executor;
|
||||
private final String job;
|
||||
private final Map<String, Path> filesToMonitor = new ConcurrentSkipListMap<>();
|
||||
private final Map<String, MonitoredFile> filesToMonitor = new ConcurrentSkipListMap<>();
|
||||
private final Map<String, Long> dataErrorCounters = new ConcurrentHashMap<>();
|
||||
private final Map<String, MemoryEstimator.HasEstimate> heapObjectsToMonitor = new ConcurrentSkipListMap<>();
|
||||
|
||||
|
@ -171,7 +170,7 @@ class PrometheusStats implements Stats {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Map<String, Path> monitoredFiles() {
|
||||
public Map<String, MonitoredFile> monitoredFiles() {
|
||||
return filesToMonitor;
|
||||
}
|
||||
|
||||
|
@ -248,11 +247,13 @@ class PrometheusStats implements Stats {
|
|||
@Override
|
||||
public List<MetricFamilySamples> collect() {
|
||||
List<Collector.MetricFamilySamples> results = new ArrayList<>();
|
||||
for (var file : filesToMonitor.entrySet()) {
|
||||
String name = sanitizeMetricName(file.getKey());
|
||||
Path path = file.getValue();
|
||||
for (var entry : filesToMonitor.entrySet()) {
|
||||
String name = sanitizeMetricName(entry.getKey());
|
||||
MonitoredFile monitoredFile = entry.getValue();
|
||||
Path path = monitoredFile.path();
|
||||
long size = monitoredFile.sizeProvider().getAsLong();
|
||||
results.add(new GaugeMetricFamily(BASE + "file_" + name + "_size_bytes", "Size of " + name + " in bytes",
|
||||
FileUtils.size(path)));
|
||||
size));
|
||||
if (Files.exists(path)) {
|
||||
try {
|
||||
FileStore fileStore = Files.getFileStore(path);
|
||||
|
|
|
@ -60,7 +60,7 @@ public interface Stats extends AutoCloseable {
|
|||
timers().printSummary();
|
||||
logger.info("-".repeat(40));
|
||||
for (var entry : monitoredFiles().entrySet()) {
|
||||
long size = FileUtils.size(entry.getValue());
|
||||
long size = entry.getValue().sizeProvider().getAsLong();
|
||||
if (size > 0) {
|
||||
logger.info("\t{}\t{}B", entry.getKey(), format.storage(size, false));
|
||||
}
|
||||
|
@ -118,15 +118,20 @@ public interface Stats extends AutoCloseable {
|
|||
Timers timers();
|
||||
|
||||
/** Returns all the files being monitored. */
|
||||
Map<String, Path> monitoredFiles();
|
||||
Map<String, MonitoredFile> monitoredFiles();
|
||||
|
||||
/** Adds a stat that will track the size of a file or directory located at {@code path}. */
|
||||
default void monitorFile(String name, Path path) {
|
||||
monitorFile(name, path, null);
|
||||
}
|
||||
|
||||
default void monitorFile(String name, Path path, LongSupplier sizeProvider) {
|
||||
if (path != null) {
|
||||
monitoredFiles().put(name, path);
|
||||
monitoredFiles().put(name, new MonitoredFile(path, sizeProvider));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/** Adds a stat that will track the estimated in-memory size of {@code object}. */
|
||||
void monitorInMemoryObject(String name, MemoryEstimator.HasEstimate object);
|
||||
|
||||
|
@ -188,7 +193,7 @@ public interface Stats extends AutoCloseable {
|
|||
private InMemory() {}
|
||||
|
||||
private final Timers timers = new Timers();
|
||||
private final Map<String, Path> monitoredFiles = new ConcurrentSkipListMap<>();
|
||||
private final Map<String, MonitoredFile> monitoredFiles = new ConcurrentSkipListMap<>();
|
||||
private final Map<String, Long> dataErrors = new ConcurrentHashMap<>();
|
||||
|
||||
@Override
|
||||
|
@ -200,7 +205,7 @@ public interface Stats extends AutoCloseable {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Map<String, Path> monitoredFiles() {
|
||||
public Map<String, MonitoredFile> monitoredFiles() {
|
||||
return monitoredFiles;
|
||||
}
|
||||
|
||||
|
@ -242,4 +247,11 @@ public interface Stats extends AutoCloseable {
|
|||
|
||||
}
|
||||
}
|
||||
|
||||
record MonitoredFile(Path path, LongSupplier sizeProvider) {
|
||||
public MonitoredFile(Path path, LongSupplier sizeProvider) {
|
||||
this.path = path;
|
||||
this.sizeProvider = sizeProvider != null ? sizeProvider : () -> FileUtils.size(path);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,10 +1,6 @@
|
|||
package com.onthegomap.planetiler.stream;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
||||
import com.fasterxml.jackson.annotation.JsonInclude.Include;
|
||||
import com.fasterxml.jackson.annotation.JsonIncludeProperties;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.annotation.JsonSubTypes;
|
||||
import com.fasterxml.jackson.annotation.JsonSubTypes.Type;
|
||||
import com.fasterxml.jackson.annotation.JsonTypeInfo;
|
||||
|
@ -16,7 +12,6 @@ import com.onthegomap.planetiler.archive.TileArchiveConfig;
|
|||
import com.onthegomap.planetiler.archive.TileArchiveMetadata;
|
||||
import com.onthegomap.planetiler.archive.TileEncodingResult;
|
||||
import com.onthegomap.planetiler.geo.TileCoord;
|
||||
import com.onthegomap.planetiler.util.LayerAttrStats;
|
||||
import java.io.BufferedOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStream;
|
||||
|
@ -27,8 +22,6 @@ import java.nio.file.Path;
|
|||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import org.locationtech.jts.geom.CoordinateXY;
|
||||
import org.locationtech.jts.geom.Envelope;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
|
@ -51,9 +44,6 @@ public final class WriteableJsonStreamArchive extends WriteableStreamArchive {
|
|||
static final JsonMapper jsonMapper = JsonMapper.builder()
|
||||
.serializationInclusion(Include.NON_ABSENT)
|
||||
.addModule(new Jdk8Module())
|
||||
.addMixIn(TileArchiveMetadata.class, TileArchiveMetadataMixin.class)
|
||||
.addMixIn(Envelope.class, EnvelopeMixin.class)
|
||||
.addMixIn(CoordinateXY.class, CoordinateXYMixin.class)
|
||||
.build();
|
||||
|
||||
private final boolean writeTilesOnly;
|
||||
|
@ -209,22 +199,4 @@ public final class WriteableJsonStreamArchive extends WriteableStreamArchive {
|
|||
|
||||
record FinishEntry(TileArchiveMetadata metadata) implements Entry {}
|
||||
|
||||
private record TileArchiveMetadataMixin(
|
||||
|
||||
@JsonIgnore(false) Envelope bounds,
|
||||
|
||||
@JsonIgnore(false) CoordinateXY center,
|
||||
|
||||
@JsonIgnore(false) List<LayerAttrStats.VectorLayer> vectorLayers
|
||||
) {}
|
||||
|
||||
@JsonIncludeProperties({"minX", "maxX", "minY", "maxY"})
|
||||
private abstract static class EnvelopeMixin {
|
||||
@JsonCreator
|
||||
EnvelopeMixin(@JsonProperty("minX") double minX, @JsonProperty("maxX") double maxX,
|
||||
@JsonProperty("minY") double minY, @JsonProperty("maxY") double maxY) {}
|
||||
}
|
||||
|
||||
@JsonIncludeProperties({"x", "y"})
|
||||
private interface CoordinateXYMixin {}
|
||||
}
|
||||
|
|
|
@ -11,7 +11,7 @@ import java.io.OutputStream;
|
|||
import java.io.UncheckedIOException;
|
||||
import java.nio.file.Path;
|
||||
import java.util.function.Consumer;
|
||||
import org.locationtech.jts.geom.CoordinateXY;
|
||||
import org.locationtech.jts.geom.Coordinate;
|
||||
import org.locationtech.jts.geom.Envelope;
|
||||
|
||||
/**
|
||||
|
@ -83,7 +83,6 @@ public final class WriteableProtoStreamArchive extends WriteableStreamArchive {
|
|||
setIfNotNull(metaDataBuilder::setFormat, metadata.format());
|
||||
setIfNotNull(metaDataBuilder::setBounds, toExportData(metadata.bounds()));
|
||||
setIfNotNull(metaDataBuilder::setCenter, toExportData(metadata.center()));
|
||||
setIfNotNull(metaDataBuilder::setZoom, metadata.zoom());
|
||||
setIfNotNull(metaDataBuilder::setMinZoom, metadata.minzoom());
|
||||
setIfNotNull(metaDataBuilder::setMaxZoom, metadata.maxzoom());
|
||||
final StreamArchiveProto.TileCompression tileCompression = switch (metadata.tileCompression()) {
|
||||
|
@ -114,13 +113,14 @@ public final class WriteableProtoStreamArchive extends WriteableStreamArchive {
|
|||
.build();
|
||||
}
|
||||
|
||||
private static StreamArchiveProto.CoordinateXY toExportData(CoordinateXY coord) {
|
||||
private static StreamArchiveProto.Coordinate toExportData(Coordinate coord) {
|
||||
if (coord == null) {
|
||||
return null;
|
||||
}
|
||||
return StreamArchiveProto.CoordinateXY.newBuilder()
|
||||
return StreamArchiveProto.Coordinate.newBuilder()
|
||||
.setX(coord.getX())
|
||||
.setY(coord.getY())
|
||||
.setZ(coord.getZ())
|
||||
.build();
|
||||
}
|
||||
|
||||
|
|
|
@ -2,6 +2,8 @@ package com.onthegomap.planetiler.stream;
|
|||
|
||||
import com.onthegomap.planetiler.archive.WriteableTileArchive;
|
||||
import com.onthegomap.planetiler.geo.TileOrder;
|
||||
import com.onthegomap.planetiler.stats.Counter;
|
||||
import com.onthegomap.planetiler.util.CountingOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStream;
|
||||
import java.io.UncheckedIOException;
|
||||
|
@ -34,6 +36,8 @@ import org.apache.logging.log4j.core.util.CloseShieldOutputStream;
|
|||
*/
|
||||
abstract class WriteableStreamArchive implements WriteableTileArchive {
|
||||
|
||||
private final Counter.MultiThreadCounter bytesWritten = Counter.newMultiThreadCounter();
|
||||
|
||||
private final OutputStream primaryOutputStream;
|
||||
private final OutputStreamSupplier outputStreamFactory;
|
||||
@SuppressWarnings("unused")
|
||||
|
@ -42,10 +46,11 @@ abstract class WriteableStreamArchive implements WriteableTileArchive {
|
|||
private final AtomicInteger tileWriterCounter = new AtomicInteger(0);
|
||||
|
||||
private WriteableStreamArchive(OutputStreamSupplier outputStreamFactory, StreamArchiveConfig config) {
|
||||
this.outputStreamFactory = outputStreamFactory;
|
||||
this.outputStreamFactory =
|
||||
i -> new CountingOutputStream(outputStreamFactory.newOutputStream(i), bytesWritten.counterForThread()::incBy);
|
||||
this.config = config;
|
||||
|
||||
this.primaryOutputStream = outputStreamFactory.newOutputStream(0);
|
||||
this.primaryOutputStream = this.outputStreamFactory.newOutputStream(0);
|
||||
}
|
||||
|
||||
protected WriteableStreamArchive(Path p, StreamArchiveConfig config) {
|
||||
|
@ -78,6 +83,11 @@ abstract class WriteableStreamArchive implements WriteableTileArchive {
|
|||
|
||||
}
|
||||
|
||||
@Override
|
||||
public long bytesWritten() {
|
||||
return bytesWritten.get();
|
||||
}
|
||||
|
||||
protected abstract TileWriter newTileWriter(OutputStream outputStream);
|
||||
|
||||
protected final OutputStream getPrimaryOutputStream() {
|
||||
|
|
|
@ -0,0 +1,47 @@
|
|||
package com.onthegomap.planetiler.util;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStream;
|
||||
import java.util.function.LongConsumer;
|
||||
|
||||
/**
|
||||
* {@link OutputStream} decorator that notifies the callback about the written bytes.
|
||||
*/
|
||||
public class CountingOutputStream extends OutputStream {
|
||||
|
||||
private final OutputStream wrapped;
|
||||
private final LongConsumer writtenBytesConsumer;
|
||||
|
||||
public CountingOutputStream(OutputStream wrapped, LongConsumer writtenBytesConsumer) {
|
||||
this.wrapped = wrapped;
|
||||
this.writtenBytesConsumer = writtenBytesConsumer;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void write(int i) throws IOException {
|
||||
wrapped.write(i);
|
||||
writtenBytesConsumer.accept(1L);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void write(byte[] b) throws IOException {
|
||||
wrapped.write(b);
|
||||
writtenBytesConsumer.accept(b.length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void write(byte[] b, int off, int len) throws IOException {
|
||||
wrapped.write(b, off, len);
|
||||
writtenBytesConsumer.accept(len);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void flush() throws IOException {
|
||||
wrapped.flush();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
wrapped.close();
|
||||
}
|
||||
}
|
|
@ -2,7 +2,6 @@ package com.onthegomap.planetiler.util;
|
|||
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.onthegomap.planetiler.archive.WriteableTileArchive;
|
||||
import com.onthegomap.planetiler.mbtiles.Mbtiles;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
@ -21,7 +20,7 @@ import javax.annotation.concurrent.ThreadSafe;
|
|||
* thread-local handler that can update stats without contention.
|
||||
* </p>
|
||||
*
|
||||
* @see Mbtiles.MetadataJson
|
||||
* @see com.onthegomap.planetiler.archive.TileArchiveMetadata.TileArchiveMetadataJson
|
||||
* @see <a href="https://github.com/mapbox/mbtiles-spec/blob/master/1.3/spec.md#content">MBtiles spec</a>
|
||||
*/
|
||||
@ThreadSafe
|
||||
|
|
|
@ -33,13 +33,12 @@ message Metadata {
|
|||
string type = 5;
|
||||
string format = 6;
|
||||
Envelope bounds = 7;
|
||||
CoordinateXY center = 8;
|
||||
double zoom = 9;
|
||||
int32 min_zoom = 10;
|
||||
int32 max_zoom = 11;
|
||||
repeated VectorLayer vector_layers = 12;
|
||||
map<string, string> others = 13;
|
||||
TileCompression tile_compression = 14;
|
||||
Coordinate center = 8;
|
||||
int32 min_zoom = 9;
|
||||
int32 max_zoom = 10;
|
||||
repeated VectorLayer vector_layers = 11;
|
||||
map<string, string> others = 12;
|
||||
TileCompression tile_compression = 13;
|
||||
}
|
||||
|
||||
message Envelope {
|
||||
|
@ -49,9 +48,10 @@ message Envelope {
|
|||
double max_y = 4;
|
||||
}
|
||||
|
||||
message CoordinateXY {
|
||||
message Coordinate {
|
||||
double x = 1;
|
||||
double y = 2;
|
||||
double z = 3;
|
||||
}
|
||||
|
||||
message VectorLayer {
|
||||
|
|
|
@ -15,6 +15,7 @@ import com.onthegomap.planetiler.collection.LongLongMap;
|
|||
import com.onthegomap.planetiler.collection.LongLongMultimap;
|
||||
import com.onthegomap.planetiler.config.Arguments;
|
||||
import com.onthegomap.planetiler.config.PlanetilerConfig;
|
||||
import com.onthegomap.planetiler.files.ReadableFilesArchive;
|
||||
import com.onthegomap.planetiler.geo.GeoUtils;
|
||||
import com.onthegomap.planetiler.geo.GeometryException;
|
||||
import com.onthegomap.planetiler.geo.TileCoord;
|
||||
|
@ -1942,6 +1943,7 @@ class PlanetilerTests {
|
|||
"--output-format=proto",
|
||||
"--output-format=pbf",
|
||||
"--output-format=json",
|
||||
"--output-format=files",
|
||||
"--tile-compression=none",
|
||||
"--tile-compression=gzip",
|
||||
"--output-layerstats",
|
||||
|
@ -1953,7 +1955,18 @@ class PlanetilerTests {
|
|||
final TileCompression tileCompression = extractTileCompression(args);
|
||||
|
||||
final TileArchiveConfig.Format format = extractFormat(args);
|
||||
final Path output = tempDir.resolve("output." + format.id());
|
||||
final String outputUri;
|
||||
final Path outputPath;
|
||||
switch (format) {
|
||||
case FILES -> {
|
||||
outputPath = tempDir.resolve("output");
|
||||
outputUri = outputPath.toString() + "?format=files";
|
||||
}
|
||||
default -> {
|
||||
outputPath = tempDir.resolve("output." + format.id());
|
||||
outputUri = outputPath.toString();
|
||||
}
|
||||
}
|
||||
|
||||
final ReadableTileArchiveFactory readableTileArchiveFactory = switch (format) {
|
||||
case MBTILES -> Mbtiles::newReadOnlyDatabase;
|
||||
|
@ -1962,6 +1975,7 @@ class PlanetilerTests {
|
|||
case JSON -> InMemoryStreamArchive::fromJson;
|
||||
case PMTILES -> ReadablePmtiles::newReadFromFile;
|
||||
case PROTO, PBF -> InMemoryStreamArchive::fromProtobuf;
|
||||
case FILES -> p -> ReadableFilesArchive.newReader(p, Arguments.of());
|
||||
};
|
||||
|
||||
|
||||
|
@ -1983,7 +1997,7 @@ class PlanetilerTests {
|
|||
.addNaturalEarthSource("ne", TestUtils.pathToResource("natural_earth_vector.sqlite"))
|
||||
.addShapefileSource("shapefile", TestUtils.pathToResource("shapefile.zip"))
|
||||
.addGeoPackageSource("geopackage", TestUtils.pathToResource("geopackage.gpkg.zip"), null)
|
||||
.setOutput(output)
|
||||
.setOutput(outputUri)
|
||||
.run();
|
||||
|
||||
// make sure it got deleted after write
|
||||
|
@ -1991,7 +2005,7 @@ class PlanetilerTests {
|
|||
assertFalse(Files.exists(tempOsm));
|
||||
}
|
||||
|
||||
try (var db = readableTileArchiveFactory.create(output)) {
|
||||
try (var db = readableTileArchiveFactory.create(outputPath)) {
|
||||
int features = 0;
|
||||
var tileMap = TestUtils.getTileMap(db, tileCompression);
|
||||
for (var tile : tileMap.values()) {
|
||||
|
@ -2022,7 +2036,7 @@ class PlanetilerTests {
|
|||
}
|
||||
}
|
||||
|
||||
final Path layerstats = output.resolveSibling(output.getFileName().toString() + ".layerstats.tsv.gz");
|
||||
final Path layerstats = outputPath.resolveSibling(outputPath.getFileName().toString() + ".layerstats.tsv.gz");
|
||||
if (args.contains("--output-layerstats")) {
|
||||
assertTrue(Files.exists(layerstats));
|
||||
byte[] data = Files.readAllBytes(layerstats);
|
||||
|
@ -2063,7 +2077,7 @@ class PlanetilerTests {
|
|||
|
||||
// ensure tilestats standalone executable produces same output
|
||||
var standaloneLayerstatsOutput = tempDir.resolve("layerstats2.tsv.gz");
|
||||
TileSizeStats.main("--input=" + output, "--output=" + standaloneLayerstatsOutput);
|
||||
TileSizeStats.main("--input=" + outputPath, "--output=" + standaloneLayerstatsOutput);
|
||||
byte[] standaloneData = Files.readAllBytes(standaloneLayerstatsOutput);
|
||||
byte[] standaloneUncompressed = Gzip.gunzip(standaloneData);
|
||||
assertEquals(
|
||||
|
|
|
@ -15,8 +15,10 @@ import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlElementWrapper;
|
|||
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlProperty;
|
||||
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlRootElement;
|
||||
import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import com.onthegomap.planetiler.archive.ReadableTileArchive;
|
||||
import com.onthegomap.planetiler.archive.Tile;
|
||||
import com.onthegomap.planetiler.archive.TileArchiveMetadata;
|
||||
import com.onthegomap.planetiler.archive.TileCompression;
|
||||
import com.onthegomap.planetiler.config.PlanetilerConfig;
|
||||
import com.onthegomap.planetiler.geo.GeoUtils;
|
||||
|
@ -26,6 +28,7 @@ import com.onthegomap.planetiler.mbtiles.Mbtiles;
|
|||
import com.onthegomap.planetiler.mbtiles.Verify;
|
||||
import com.onthegomap.planetiler.reader.SourceFeature;
|
||||
import com.onthegomap.planetiler.stats.Stats;
|
||||
import com.onthegomap.planetiler.util.LayerAttrStats;
|
||||
import java.io.IOException;
|
||||
import java.io.UncheckedIOException;
|
||||
import java.nio.file.Files;
|
||||
|
@ -42,6 +45,8 @@ import java.util.HashSet;
|
|||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
import java.util.OptionalInt;
|
||||
import java.util.Set;
|
||||
import java.util.TreeMap;
|
||||
import java.util.TreeSet;
|
||||
|
@ -73,6 +78,62 @@ public class TestUtils {
|
|||
public static final AffineTransformation TRANSFORM_TO_TILE = AffineTransformation
|
||||
.scaleInstance(256d / 4096d, 256d / 4096d);
|
||||
|
||||
public static final TileArchiveMetadata MAX_METADATA_DESERIALIZED =
|
||||
new TileArchiveMetadata("name", "description", "attribution", "version", "type", "format", new Envelope(0, 1, 2, 3),
|
||||
new Coordinate(1.3, 3.7, 1.0), 2, 3,
|
||||
TileArchiveMetadata.TileArchiveMetadataJson.create(
|
||||
List.of(
|
||||
new LayerAttrStats.VectorLayer("vl0",
|
||||
ImmutableMap.of("1", LayerAttrStats.FieldType.BOOLEAN, "2", LayerAttrStats.FieldType.NUMBER, "3",
|
||||
LayerAttrStats.FieldType.STRING),
|
||||
Optional.of("description"), OptionalInt.of(1), OptionalInt.of(2)),
|
||||
new LayerAttrStats.VectorLayer("vl1",
|
||||
Map.of(),
|
||||
Optional.empty(), OptionalInt.empty(), OptionalInt.empty())
|
||||
)
|
||||
),
|
||||
ImmutableMap.of("a", "b", "c", "d"),
|
||||
TileCompression.GZIP);
|
||||
public static final String MAX_METADATA_SERIALIZED = """
|
||||
{
|
||||
"name":"name",
|
||||
"description":"description",
|
||||
"attribution":"attribution",
|
||||
"version":"version",
|
||||
"type":"type",
|
||||
"format":"format",
|
||||
"minzoom":"2",
|
||||
"maxzoom":"3",
|
||||
"compression":"gzip",
|
||||
"bounds":"0,2,1,3",
|
||||
"center":"1.3,3.7,1",
|
||||
"json": "{
|
||||
\\"vector_layers\\":[
|
||||
{
|
||||
\\"id\\":\\"vl0\\",
|
||||
\\"fields\\":{
|
||||
\\"1\\":\\"Boolean\\",
|
||||
\\"2\\":\\"Number\\",
|
||||
\\"3\\":\\"String\\"
|
||||
},
|
||||
\\"description\\":\\"description\\",
|
||||
\\"minzoom\\":1,
|
||||
\\"maxzoom\\":2
|
||||
},
|
||||
{
|
||||
\\"id\\":\\"vl1\\",
|
||||
\\"fields\\":{}
|
||||
}
|
||||
]
|
||||
}",
|
||||
"a":"b",
|
||||
"c":"d"
|
||||
}""".lines().map(String::trim).collect(Collectors.joining(""));
|
||||
|
||||
public static final TileArchiveMetadata MIN_METADATA_DESERIALIZED =
|
||||
new TileArchiveMetadata(null, null, null, null, null, null, null, null, null, null, null, null, null);
|
||||
public static final String MIN_METADATA_SERIALIZED = "{}";
|
||||
|
||||
public static List<Coordinate> newCoordinateList(double... coords) {
|
||||
List<Coordinate> result = new ArrayList<>(coords.length / 2);
|
||||
for (int i = 0; i < coords.length; i += 2) {
|
||||
|
|
|
@ -1,10 +1,20 @@
|
|||
package com.onthegomap.planetiler.archive;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertFalse;
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.Map;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.io.TempDir;
|
||||
import org.junit.jupiter.params.ParameterizedTest;
|
||||
import org.junit.jupiter.params.provider.CsvSource;
|
||||
import org.junit.jupiter.params.provider.EnumSource;
|
||||
|
||||
class TileArchiveConfigTest {
|
||||
|
||||
|
@ -33,4 +43,95 @@ class TileArchiveConfigTest {
|
|||
assertEquals(TileArchiveConfig.Format.PMTILES,
|
||||
TileArchiveConfig.from("file:///output.mbtiles?format=pmtiles").format());
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@EnumSource(TileArchiveConfig.Format.class)
|
||||
void testByFormatParam(TileArchiveConfig.Format format) {
|
||||
final var config = TileArchiveConfig.from("output?format=" + format.id());
|
||||
assertEquals(format, config.format());
|
||||
assertEquals(TileArchiveConfig.Scheme.FILE, config.scheme());
|
||||
assertEquals(Path.of("output").toAbsolutePath(), config.getLocalPath());
|
||||
assertEquals(Map.of("format", format.id()), config.options());
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@EnumSource(TileArchiveConfig.Format.class)
|
||||
void testGetPathForMultiThreadedWriter(TileArchiveConfig.Format format) {
|
||||
final var config = TileArchiveConfig.from("output?format=" + format.id());
|
||||
if (!format.supportsConcurrentWrites()) {
|
||||
assertThrows(UnsupportedOperationException.class, () -> config.getPathForMultiThreadedWriter(0));
|
||||
assertThrows(UnsupportedOperationException.class, () -> config.getPathForMultiThreadedWriter(1));
|
||||
} else {
|
||||
assertEquals(config.getLocalPath(), config.getPathForMultiThreadedWriter(0));
|
||||
final Path p = config.getPathForMultiThreadedWriter(1);
|
||||
switch (format) {
|
||||
case FILES -> assertEquals(p, config.getLocalPath());
|
||||
default -> assertEquals(config.getLocalPath().getParent().resolve(Paths.get("output1")), p);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testExistsForFilesArchive(@TempDir Path tempDir) throws IOException {
|
||||
final Path out = tempDir.resolve("outdir");
|
||||
final var config = TileArchiveConfig.from(out + "?format=files");
|
||||
assertFalse(config.exists());
|
||||
Files.createDirectory(out);
|
||||
assertFalse(config.exists());
|
||||
Files.createFile(out.resolve("1"));
|
||||
assertTrue(config.exists());
|
||||
}
|
||||
|
||||
@Test
|
||||
void testExistsForNonFilesArchive(@TempDir Path tempDir) throws IOException {
|
||||
final Path mbtilesOut = tempDir.resolve("out.mbtiles");
|
||||
final var config = TileArchiveConfig.from(mbtilesOut.toString());
|
||||
assertFalse(config.exists());
|
||||
Files.createFile(mbtilesOut);
|
||||
assertTrue(config.exists());
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@CsvSource({
|
||||
// standard cases
|
||||
"output,FILES",
|
||||
"output.mbtiles,MBTILES",
|
||||
"output.pmtiles,PMTILES",
|
||||
"output.pbf,PBF",
|
||||
"output.proto,PROTO",
|
||||
"output.json,JSON",
|
||||
"output.csv,CSV",
|
||||
"output.tsv,TSV",
|
||||
// special cases
|
||||
"output.mbtiles/,FILES", // trailing slash => files - regardless of the extension
|
||||
"output/,FILES",
|
||||
"output.mbtiles/?format=proto,PROTO", // format query param has precedence
|
||||
"tiles/{x}/{y}/{z}.pbf,FILES"
|
||||
})
|
||||
void testPathMapping(String path, TileArchiveConfig.Format format) {
|
||||
final var config = TileArchiveConfig.from(path);
|
||||
assertEquals(format, config.format());
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@CsvSource({
|
||||
"a/output.mbtiles,a/output.mbtiles",
|
||||
"a/tiles/{x}/{y}/{z}.pbf,a/tiles",
|
||||
"a/tiles/{x}/{y}/{z}.pbf?format=proto,a/tiles/{x}/{y}/{z}.pbf"
|
||||
})
|
||||
void testLocalBasePath(String path, Path localBasePath, @TempDir Path tempDir) {
|
||||
final var config = TileArchiveConfig.from(tempDir.toString() + "/" + path);
|
||||
assertEquals(tempDir.resolve(localBasePath), config.getLocalBasePath());
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@CsvSource({
|
||||
"a/output.mbtiles,a/output.mbtiles",
|
||||
"a/tiles/{x}/{y}/{z}.pbf,a/tiles/{x}/{y}/{z}.pbf",
|
||||
"a/tiles/{x}/{y}/{z}.pbf?format=proto,a/tiles/{x}/{y}/{z}.pbf"
|
||||
})
|
||||
void testLocalPath(String path, Path localPath, @TempDir Path tempDir) {
|
||||
final var config = TileArchiveConfig.from(tempDir.toString() + "/" + path);
|
||||
assertEquals(tempDir.resolve(localPath), config.getLocalPath());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,20 +1,173 @@
|
|||
package com.onthegomap.planetiler.archive;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||
import static org.junit.jupiter.api.Assertions.*;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.JsonMappingException;
|
||||
import com.fasterxml.jackson.databind.json.JsonMapper;
|
||||
import com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import com.onthegomap.planetiler.Profile;
|
||||
import com.onthegomap.planetiler.TestUtils;
|
||||
import com.onthegomap.planetiler.config.Arguments;
|
||||
import com.onthegomap.planetiler.config.PlanetilerConfig;
|
||||
import com.onthegomap.planetiler.geo.GeoUtils;
|
||||
import com.onthegomap.planetiler.util.LayerAttrStats;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.TreeMap;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.locationtech.jts.geom.Coordinate;
|
||||
import org.locationtech.jts.geom.CoordinateXY;
|
||||
import org.locationtech.jts.geom.Envelope;
|
||||
|
||||
class TileArchiveMetadataTest {
|
||||
|
||||
// produced by tilelive-copy or rather mbutil - just reduced $.json.vector_layers[]
|
||||
private static final String SERIALIZED =
|
||||
"""
|
||||
{
|
||||
"attribution": "<a href=\\"https://www.openmaptiles.org/\\" target=\\"_blank\\">© OpenMapTiles</a> <a href=\\"https://www.openstreetmap.org/copyright\\" target=\\"_blank\\">© OpenStreetMap contributors</a>",\s
|
||||
"description": "A tileset showcasing all layers in OpenMapTiles. https://openmaptiles.org",\s
|
||||
"format": "pbf",\s
|
||||
"planetiler:version": "0.7-SNAPSHOT",\s
|
||||
"bounds": "7.40921,43.72335,7.44864,43.75169",\s
|
||||
"name": "OpenMapTiles",\s
|
||||
"planetiler:githash": "09c22c18268d9cc1371ed0b0af192e698abf54c7",\s
|
||||
"json": "{\\"vector_layers\\":[{\\"fields\\":{\\"class\\":\\"String\\",\\"ref\\":\\"String\\"},\\"id\\":\\"aeroway\\",\\"maxzoom\\":14,\\"minzoom\\":12}]}",\s
|
||||
"version": "3.14.0",\s
|
||||
"compression": "gzip",\s
|
||||
"minzoom": "0",\s
|
||||
"planetiler:osm:osmosisreplicationurl": "http://download.geofabrik.de/europe/monaco-updates",\s
|
||||
"maxzoom": "14",\s
|
||||
"planetiler:osm:osmosisreplicationseq": "3911",\s
|
||||
"type": "baselayer",\s
|
||||
"planetiler:buildtime": "2023-12-20T21:33:49.594Z",\s
|
||||
"planetiler:osm:osmosisreplicationtime": "2023-12-18T21:21:01Z",\s
|
||||
"center": "7.42892,43.73752,14"
|
||||
}
|
||||
""";
|
||||
|
||||
private static final TileArchiveMetadata DESERIALIZED = new TileArchiveMetadata(
|
||||
"OpenMapTiles",
|
||||
"A tileset showcasing all layers in OpenMapTiles. https://openmaptiles.org",
|
||||
"<a href=\"https://www.openmaptiles.org/\" target=\"_blank\">© OpenMapTiles</a> <a href=\"https://www.openstreetmap.org/copyright\" target=\"_blank\">© OpenStreetMap contributors</a>",
|
||||
"3.14.0",
|
||||
"baselayer",
|
||||
"pbf",
|
||||
new Envelope(7.40921, 7.44864, 43.72335, 43.75169),
|
||||
new Coordinate(7.42892, 43.73752, 14),
|
||||
0,
|
||||
14,
|
||||
new TileArchiveMetadata.TileArchiveMetadataJson(
|
||||
List.of(
|
||||
new LayerAttrStats.VectorLayer(
|
||||
"aeroway",
|
||||
Map.of(
|
||||
"ref", LayerAttrStats.FieldType.STRING,
|
||||
"class", LayerAttrStats.FieldType.STRING
|
||||
),
|
||||
12,
|
||||
14
|
||||
)
|
||||
)
|
||||
),
|
||||
Map.of(
|
||||
"planetiler:version", "0.7-SNAPSHOT",
|
||||
"planetiler:githash", "09c22c18268d9cc1371ed0b0af192e698abf54c7",
|
||||
"planetiler:osm:osmosisreplicationurl", "http://download.geofabrik.de/europe/monaco-updates",
|
||||
"planetiler:osm:osmosisreplicationseq", "3911",
|
||||
"planetiler:buildtime", "2023-12-20T21:33:49.594Z",
|
||||
"planetiler:osm:osmosisreplicationtime", "2023-12-18T21:21:01Z"
|
||||
),
|
||||
TileCompression.GZIP
|
||||
);
|
||||
|
||||
private final JsonMapper jsonMapper = TileArchiveMetadataDeSer.mbtilesMapper();
|
||||
|
||||
private final JsonMapper jsonMapperStrict = TileArchiveMetadataDeSer.newBaseBuilder()
|
||||
.addMixIn(TileArchiveMetadata.class, TileArchiveMetadataDeSer.StrictDeserializationMixin.class)
|
||||
.build();
|
||||
|
||||
@Test
|
||||
void testDeserialization() throws JsonProcessingException {
|
||||
var actualDeserialized = jsonMapper.readValue(SERIALIZED, TileArchiveMetadata.class);
|
||||
assertEquals(DESERIALIZED, actualDeserialized);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSerialization() throws JsonProcessingException {
|
||||
|
||||
final ObjectNode o0 = (ObjectNode) jsonMapper.readTree(SERIALIZED);
|
||||
final ObjectNode o1 = (ObjectNode) jsonMapper.readTree(jsonMapper.writeValueAsString(DESERIALIZED));
|
||||
|
||||
// string-escaped JSON might change order => parse JSON
|
||||
TestUtils.assertSameJson(
|
||||
o0.get("json").asText(),
|
||||
o1.get("json").asText()
|
||||
);
|
||||
o0.remove("json");
|
||||
o1.remove("json");
|
||||
|
||||
assertEquals(o0, o1);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testCenterDeserialization() throws JsonProcessingException {
|
||||
|
||||
final String s0 = """
|
||||
{"center": null}
|
||||
""";
|
||||
assertNull(jsonMapper.readValue(s0, TileArchiveMetadata.class).center());
|
||||
|
||||
final String s1 = """
|
||||
{"center": "0.0,1.1"}
|
||||
""";
|
||||
assertEqualsCoordinate(new CoordinateXY(0.0, 1.1), jsonMapper.readValue(s1, TileArchiveMetadata.class).center());
|
||||
|
||||
final String s2 = """
|
||||
{"center": "0.0,1.1,14"}
|
||||
""";
|
||||
assertEqualsCoordinate(new Coordinate(0.0, 1.1, 14), jsonMapper.readValue(s2, TileArchiveMetadata.class).center());
|
||||
|
||||
final String s3 = """
|
||||
{"center": "0.0,1.1,14,42"}
|
||||
""";
|
||||
assertEqualsCoordinate(new Coordinate(0.0, 1.1, 14), jsonMapper.readValue(s3, TileArchiveMetadata.class).center());
|
||||
assertThrows(JsonMappingException.class, () -> jsonMapperStrict.readValue(s3, TileArchiveMetadata.class));
|
||||
|
||||
final String s4 = """
|
||||
{"center": "0.0"}
|
||||
""";
|
||||
assertNull(jsonMapper.readValue(s4, TileArchiveMetadata.class).center());
|
||||
assertThrows(JsonMappingException.class, () -> jsonMapperStrict.readValue(s4, TileArchiveMetadata.class));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testBoundsDeserialization() throws JsonProcessingException {
|
||||
|
||||
final String s0 = """
|
||||
{"bounds": null}
|
||||
""";
|
||||
assertNull(jsonMapper.readValue(s0, TileArchiveMetadata.class).bounds());
|
||||
|
||||
final String s1 = """
|
||||
{"bounds": "1.0,2.0,3.0,4.0"}
|
||||
""";
|
||||
assertEquals(new Envelope(1.0, 3.0, 2.0, 4.0), jsonMapper.readValue(s1, TileArchiveMetadata.class).bounds());
|
||||
|
||||
final String s2 = """
|
||||
{"bounds": "1.0,2.0,3.0,4.0,5.0"}
|
||||
""";
|
||||
assertEquals(new Envelope(1.0, 3.0, 2.0, 4.0), jsonMapper.readValue(s2, TileArchiveMetadata.class).bounds());
|
||||
assertThrows(JsonMappingException.class, () -> jsonMapperStrict.readValue(s2, TileArchiveMetadata.class));
|
||||
|
||||
final String s3 = """
|
||||
{"bounds": "1.0"}
|
||||
""";
|
||||
assertNull(jsonMapper.readValue(s3, TileArchiveMetadata.class).bounds());
|
||||
assertThrows(JsonMappingException.class, () -> jsonMapperStrict.readValue(s3, TileArchiveMetadata.class));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testAddMetadataWorldBounds() {
|
||||
var bounds = GeoUtils.WORLD_LAT_LON_BOUNDS;
|
||||
|
@ -39,17 +192,34 @@ class TileArchiveMetadataTest {
|
|||
}
|
||||
|
||||
@Test
|
||||
void testToMap() {
|
||||
void testToMap() throws JsonProcessingException {
|
||||
var bounds = "-73.6632,41.1274,-69.7598,43.0185";
|
||||
var metadata = new TileArchiveMetadata(
|
||||
new Profile.NullProfile(),
|
||||
PlanetilerConfig.from(Arguments.of(Map.of(
|
||||
"bounds", bounds
|
||||
))));
|
||||
metadata = metadata.withLayerStats(
|
||||
List.of(
|
||||
new LayerAttrStats.VectorLayer(
|
||||
"aeroway",
|
||||
Map.of(
|
||||
"ref", LayerAttrStats.FieldType.STRING,
|
||||
"class", LayerAttrStats.FieldType.STRING
|
||||
),
|
||||
12,
|
||||
14
|
||||
)
|
||||
)
|
||||
);
|
||||
var map = new TreeMap<>(metadata.toMap());
|
||||
assertNotNull(map.remove("planetiler:version"));
|
||||
map.remove("planetiler:githash");
|
||||
map.remove("planetiler:buildtime");
|
||||
TestUtils.assertSameJson(
|
||||
"[{\"id\":\"aeroway\",\"fields\":{\"ref\":\"String\",\"class\":\"String\"},\"minzoom\":12,\"maxzoom\":14}]",
|
||||
map.remove("vector_layers")
|
||||
);
|
||||
assertEquals(
|
||||
new TreeMap<>(Map.of(
|
||||
"name", "Null",
|
||||
|
@ -65,4 +235,9 @@ class TileArchiveMetadataTest {
|
|||
map
|
||||
);
|
||||
}
|
||||
|
||||
private static void assertEqualsCoordinate(Coordinate c0, Coordinate c1) {
|
||||
assertEquals(c0, c1);
|
||||
assertTrue(c0.equals3D(c1)); // Coordinate#equals checks 2D only...
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,107 @@
|
|||
package com.onthegomap.planetiler.files;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
|
||||
import com.onthegomap.planetiler.config.Arguments;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.Map;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.io.TempDir;
|
||||
import org.junit.jupiter.params.ParameterizedTest;
|
||||
import org.junit.jupiter.params.provider.CsvSource;
|
||||
|
||||
class FilesArchiveUtilsTest {
|
||||
|
||||
@ParameterizedTest
|
||||
@CsvSource(textBlock = """
|
||||
{z}/{x}/{y}.pbf , , {z}/{x}/{y}.pbf
|
||||
, , {z}/{x}/{y}.pbf
|
||||
{x}/{y}/{z}.pbf , , {x}/{y}/{z}.pbf
|
||||
tiles/{z}/{x}/{y}.pbf , tiles, {z}/{x}/{y}.pbf
|
||||
tiles/z{z}/{x}/{y}.pbf , tiles, z{z}/{x}/{y}.pbf
|
||||
z{z}/x{x}/y{y}.pbf , , z{z}/x{x}/y{y}.pbf
|
||||
tiles/tile-{z}-{x}-{y}.pbf, tiles, tile-{z}-{x}-{y}.pbf
|
||||
/a , /a , {z}/{x}/{y}.pbf
|
||||
/ , / , {z}/{x}/{y}.pbf
|
||||
"""
|
||||
)
|
||||
void testBasePathWithTileSchemeEncoding(String shortcutBase, String actualBase, String tileScheme,
|
||||
@TempDir Path tempDir) {
|
||||
|
||||
final Path shortcutBasePath = makePath(shortcutBase, tempDir);
|
||||
final Path actualBasePath = makePath(actualBase, tempDir);
|
||||
|
||||
assertEquals(
|
||||
new FilesArchiveUtils.BasePathWithTileSchemeEncoding(
|
||||
actualBasePath,
|
||||
new TileSchemeEncoding(
|
||||
Paths.get(tileScheme).toString(),
|
||||
actualBasePath
|
||||
)
|
||||
),
|
||||
FilesArchiveUtils.basePathWithTileSchemeEncoding(Arguments.of(), shortcutBasePath)
|
||||
);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testBasePathWithTileSchemeEncodingPrefersArgOverShortcut() {
|
||||
final Path basePath = Paths.get("");
|
||||
final Path schemeShortcutPath = Paths.get("{x}", "{y}", "{z}.pbf");
|
||||
final Path schemeArgumentPath = Paths.get("x{x}", "y{y}", "z{z}.pbf");
|
||||
final Path shortcutPath = basePath.resolve(schemeShortcutPath);
|
||||
assertEquals(
|
||||
new FilesArchiveUtils.BasePathWithTileSchemeEncoding(
|
||||
basePath,
|
||||
new TileSchemeEncoding(
|
||||
schemeShortcutPath.toString(),
|
||||
basePath
|
||||
)
|
||||
),
|
||||
FilesArchiveUtils.basePathWithTileSchemeEncoding(Arguments.of(), shortcutPath)
|
||||
);
|
||||
assertEquals(
|
||||
new FilesArchiveUtils.BasePathWithTileSchemeEncoding(
|
||||
basePath,
|
||||
new TileSchemeEncoding(
|
||||
schemeArgumentPath.toString(),
|
||||
basePath
|
||||
)
|
||||
),
|
||||
FilesArchiveUtils.basePathWithTileSchemeEncoding(
|
||||
Arguments.of(Map.of(FilesArchiveUtils.OPTION_TILE_SCHEME, schemeArgumentPath.toString())), shortcutPath)
|
||||
);
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@CsvSource(textBlock = """
|
||||
{z}/{x}/{y}.pbf ,
|
||||
,
|
||||
{x}/{y}/{z}.pbf ,
|
||||
tiles/{z}/{x}/{y}.pbf , tiles
|
||||
tiles/z{z}/{x}/{y}.pbf , tiles
|
||||
z{z}/x{x}/y{y}.pbf ,
|
||||
tiles/tile-{z}-{x}-{y}.pbf, tiles
|
||||
/a , /a
|
||||
/ , /
|
||||
"""
|
||||
)
|
||||
void testCleanBasePath(String shortcutBase, String actualBase, @TempDir Path tempDir) {
|
||||
|
||||
assertEquals(
|
||||
makePath(actualBase, tempDir),
|
||||
FilesArchiveUtils.cleanBasePath(makePath(shortcutBase, tempDir))
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
private static Path makePath(String in, @TempDir Path tempDir) {
|
||||
if (in == null) {
|
||||
return Paths.get("");
|
||||
}
|
||||
if (in.startsWith("/")) {
|
||||
return tempDir.resolve(in.substring(1));
|
||||
}
|
||||
return Paths.get(in);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,201 @@
|
|||
package com.onthegomap.planetiler.files;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
|
||||
import com.onthegomap.planetiler.TestUtils;
|
||||
import com.onthegomap.planetiler.archive.Tile;
|
||||
import com.onthegomap.planetiler.config.Arguments;
|
||||
import com.onthegomap.planetiler.geo.TileCoord;
|
||||
import java.io.IOException;
|
||||
import java.io.UncheckedIOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.io.TempDir;
|
||||
import org.junit.jupiter.params.ParameterizedTest;
|
||||
import org.junit.jupiter.params.provider.CsvSource;
|
||||
|
||||
class ReadableFilesArchiveTest {
|
||||
|
||||
@Test
|
||||
void testRead(@TempDir Path tempDir) throws IOException {
|
||||
|
||||
final Path tilesDir = tempDir.resolve("tiles");
|
||||
|
||||
final List<Path> files = List.of(
|
||||
tilesDir.resolve(Paths.get("0", "0", "0.pbf")),
|
||||
tilesDir.resolve(Paths.get("1", "2", "3.pbf")),
|
||||
// invalid
|
||||
tilesDir.resolve(Paths.get("9", "9")),
|
||||
tilesDir.resolve(Paths.get("9", "x")),
|
||||
tilesDir.resolve(Paths.get("9", "8", "9")),
|
||||
tilesDir.resolve(Paths.get("9", "8", "9.")),
|
||||
tilesDir.resolve(Paths.get("9", "8", "x.pbf")),
|
||||
tilesDir.resolve(Paths.get("9", "b", "1.pbf")),
|
||||
tilesDir.resolve(Paths.get("a", "8", "1.pbf")),
|
||||
tilesDir.resolve(Paths.get("9", "7.pbf")),
|
||||
tilesDir.resolve(Paths.get("8.pbf"))
|
||||
);
|
||||
for (int i = 0; i < files.size(); i++) {
|
||||
final Path file = files.get(i);
|
||||
Files.createDirectories(file.getParent());
|
||||
Files.write(files.get(i), new byte[]{(byte) i});
|
||||
}
|
||||
|
||||
try (var reader = ReadableFilesArchive.newReader(tilesDir, Arguments.of())) {
|
||||
final List<Tile> tiles = reader.getAllTiles().stream().sorted().toList();
|
||||
assertEquals(
|
||||
List.of(
|
||||
new Tile(TileCoord.ofXYZ(0, 0, 0), new byte[]{0}),
|
||||
new Tile(TileCoord.ofXYZ(2, 3, 1), new byte[]{1})
|
||||
),
|
||||
tiles
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testGetTileNotExists(@TempDir Path tempDir) throws IOException {
|
||||
final Path tilesDir = tempDir.resolve("tiles");
|
||||
Files.createDirectories(tilesDir);
|
||||
try (var reader = ReadableFilesArchive.newReader(tilesDir, Arguments.of())) {
|
||||
assertNull(reader.getTile(0, 0, 0));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFailsToReadTileFromDir(@TempDir Path tempDir) throws IOException {
|
||||
final Path tilesDir = tempDir.resolve("tiles");
|
||||
Files.createDirectories(tilesDir.resolve(Paths.get("0", "0", "0.pbf")));
|
||||
try (var reader = ReadableFilesArchive.newReader(tilesDir, Arguments.of())) {
|
||||
assertThrows(UncheckedIOException.class, () -> reader.getTile(0, 0, 0));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testRequiresExistingPath(@TempDir Path tempDir) {
|
||||
final Path tilesDir = tempDir.resolve("tiles");
|
||||
final Arguments options = Arguments.of();
|
||||
assertThrows(IllegalArgumentException.class, () -> ReadableFilesArchive.newReader(tilesDir, options));
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@CsvSource(textBlock = """
|
||||
{z}/{x}/{y}.pbf, 3/1/2.pbf
|
||||
{x}/{y}/{z}.pbf, 1/2/3.pbf
|
||||
{x}-{y}-{z}.pbf, 1-2-3.pbf
|
||||
{x}/a/{y}/b{z}.pbf, 1/a/2/b3.pbf
|
||||
{z}/{x}/{y}.pbf.gz, 3/1/2.pbf.gz
|
||||
{z}/{xs}/{ys}.pbf, 3/000/001/000/002.pbf
|
||||
{z}/{x}/{ys}.pbf, 3/1/000/002.pbf
|
||||
{z}/{xs}/{y}.pbf, 3/000/001/2.pbf
|
||||
"""
|
||||
)
|
||||
void testReadCustomScheme(String tileScheme, Path tileFile, @TempDir Path tempDir) throws IOException {
|
||||
final Path tilesDir = tempDir.resolve("tiles");
|
||||
tileFile = tilesDir.resolve(tileFile);
|
||||
Files.createDirectories(tileFile.getParent());
|
||||
Files.write(tileFile, new byte[]{1});
|
||||
|
||||
final Arguments options = Arguments.of(Map.of(FilesArchiveUtils.OPTION_TILE_SCHEME, tileScheme));
|
||||
try (var archive = ReadableFilesArchive.newReader(tilesDir, options)) {
|
||||
assertEquals(
|
||||
List.of(TileCoord.ofXYZ(1, 2, 3)),
|
||||
archive.getAllTileCoords().stream().toList()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@CsvSource(textBlock = """
|
||||
{z}/{x}/{y}.pbf , , 3/1/2.pbf
|
||||
tiles/{z}/{x}/{y}.pbf , tiles, tiles/3/1/2.pbf
|
||||
tiles/z{z}/{x}/{y}.pbf , tiles, tiles/z3/1/2.pbf
|
||||
z{z}/x{x}/y{y}.pbf , , z3/x1/y2.pbf
|
||||
tiles/tile-{z}-{x}-{y}.pbf, tiles, tiles/tile-3-1-2.pbf
|
||||
"""
|
||||
)
|
||||
void testTileSchemeFromBasePath(Path shortcutBasePath, Path actualBasePath, Path tileFile, @TempDir Path tempDir)
|
||||
throws IOException {
|
||||
final Path testBase = tempDir.resolve("tiles");
|
||||
|
||||
shortcutBasePath = testBase.resolve(shortcutBasePath);
|
||||
actualBasePath = testBase.resolve(Objects.requireNonNullElse(actualBasePath, Paths.get("")));
|
||||
tileFile = testBase.resolve(tileFile);
|
||||
Files.createDirectories(tileFile.getParent());
|
||||
Files.write(tileFile, new byte[]{1});
|
||||
|
||||
final Path metadataFile = actualBasePath.resolve("metadata.json");
|
||||
Files.writeString(metadataFile, TestUtils.MAX_METADATA_SERIALIZED);
|
||||
|
||||
try (var archive = ReadableFilesArchive.newReader(shortcutBasePath, Arguments.of())) {
|
||||
assertEquals(
|
||||
List.of(TileCoord.ofXYZ(1, 2, 3)),
|
||||
archive.getAllTileCoords().stream().toList()
|
||||
);
|
||||
assertNotNull(archive.metadata());
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testHasNoMetaData(@TempDir Path tempDir) throws IOException {
|
||||
final Path tilesDir = tempDir.resolve("tiles");
|
||||
Files.createDirectories(tilesDir);
|
||||
try (var reader = ReadableFilesArchive.newReader(tilesDir, Arguments.of())) {
|
||||
assertNull(reader.metadata());
|
||||
}
|
||||
}
|
||||
|
||||
private void testMetadata(Path basePath, Arguments options, Path metadataPath) throws IOException {
|
||||
try (var reader = ReadableFilesArchive.newReader(basePath, options)) {
|
||||
assertNull(reader.metadata());
|
||||
|
||||
Files.writeString(metadataPath, TestUtils.MAX_METADATA_SERIALIZED);
|
||||
assertEquals(TestUtils.MAX_METADATA_DESERIALIZED, reader.metadata());
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMetadataDefault(@TempDir Path tempDir) throws IOException {
|
||||
final Path tilesDir = Files.createDirectories(tempDir.resolve("tiles"));
|
||||
testMetadata(tilesDir, Arguments.of(), tilesDir.resolve("metadata.json"));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMetadataRelative(@TempDir Path tempDir) throws IOException {
|
||||
final Path tilesDir = Files.createDirectories(tempDir.resolve("tiles"));
|
||||
final Path meteadataPath = tilesDir.resolve("x.y");
|
||||
final Arguments options = Arguments.of(Map.of(FilesArchiveUtils.OPTION_METADATA_PATH, "x.y"));
|
||||
testMetadata(tilesDir, options, meteadataPath);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMetadataAbsolute(@TempDir Path tempDir) throws IOException {
|
||||
final Path tilesDir = Files.createDirectories(tempDir.resolve("tiles"));
|
||||
final Path meteadataPath = Files.createDirectories(tempDir.resolve(Paths.get("abs"))).resolve("x.y");
|
||||
final Arguments options =
|
||||
Arguments.of(Map.of(FilesArchiveUtils.OPTION_METADATA_PATH, meteadataPath.toAbsolutePath().toString()));
|
||||
testMetadata(tilesDir, options, meteadataPath);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMetadataNone(@TempDir Path tempDir) throws IOException {
|
||||
final Path tilesDir = Files.createDirectories(tempDir.resolve("tiles"));
|
||||
final Path meteadataPath = tilesDir.resolve("none");
|
||||
final Arguments options = Arguments.of(Map.of(FilesArchiveUtils.OPTION_METADATA_PATH, "none"));
|
||||
|
||||
try (var reader = ReadableFilesArchive.newReader(tilesDir, options)) {
|
||||
assertNull(reader.metadata());
|
||||
|
||||
Files.writeString(meteadataPath, TestUtils.MAX_METADATA_SERIALIZED);
|
||||
assertNull(reader.metadata());
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,138 @@
|
|||
package com.onthegomap.planetiler.files;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
|
||||
import com.onthegomap.planetiler.geo.TileCoord;
|
||||
import com.onthegomap.planetiler.geo.TileOrder;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.Optional;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.io.TempDir;
|
||||
import org.junit.jupiter.params.ParameterizedTest;
|
||||
import org.junit.jupiter.params.provider.CsvSource;
|
||||
|
||||
class TileSchemeEncodingTest {
|
||||
|
||||
@ParameterizedTest
|
||||
@CsvSource(textBlock = """
|
||||
{z}/{x}/{y}.pbf, 3/1/2.pbf
|
||||
{x}/{y}/{z}.pbf, 1/2/3.pbf
|
||||
{x}-{y}-{z}.pbf, 1-2-3.pbf
|
||||
{x}/a/{y}/b{z}.pbf, 1/a/2/b3.pbf
|
||||
{z}/{x}/{y}.pbf.gz, 3/1/2.pbf.gz
|
||||
{z}/{xs}/{ys}.pbf, 3/000/001/000/002.pbf
|
||||
{z}/{x}/{ys}.pbf, 3/1/000/002.pbf
|
||||
{z}/{xs}/{y}.pbf, 3/000/001/2.pbf
|
||||
"""
|
||||
)
|
||||
void testEncoder(String tileScheme, Path tilePath, @TempDir Path tempDir) {
|
||||
final Path tilesDir = tempDir.resolve("tiles");
|
||||
tilePath = tilesDir.resolve(tilePath);
|
||||
assertEquals(
|
||||
tilePath,
|
||||
new TileSchemeEncoding(tileScheme, tilesDir).encoder().apply(TileCoord.ofXYZ(1, 2, 3))
|
||||
);
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@CsvSource(textBlock = """
|
||||
{z}/{x}/{y}.pbf, 3/1/2.pbf, true
|
||||
{x}/{y}/{z}.pbf, 1/2/3.pbf, true
|
||||
{x}-{y}-{z}.pbf, 1-2-3.pbf, true
|
||||
{x}/a/{y}/b{z}.pbf, 1/a/2/b3.pbf, true
|
||||
{z}/{x}/{y}.pbf.gz, 3/1/2.pbf.gz, true
|
||||
{z}/{xs}/{ys}.pbf, 3/000/001/000/002.pbf, true
|
||||
{z}/{x}/{ys}.pbf, 3/1/000/002.pbf, true
|
||||
{z}/{xs}/{y}.pbf, 3/000/001/2.pbf, true
|
||||
|
||||
{z}/{x}/{y}.pbf, 3/1/2.pb, false
|
||||
{z}/{x}/{y}.pbf, 3/1/2, false
|
||||
{z}/{x}/{y}.pbf, a/1/2.pbf, false
|
||||
{z}/{x}/{y}.pbf, 3/a/2.pbf, false
|
||||
{z}/{x}/{y}.pbf, 3/1/a.pbf, false
|
||||
"""
|
||||
)
|
||||
void testDecoder(String tileScheme, Path tilePath, boolean valid, @TempDir Path tempDir) {
|
||||
final Path tilesDir = tempDir.resolve("tiles");
|
||||
tilePath = tilesDir.resolve(tilePath);
|
||||
if (valid) {
|
||||
assertEquals(
|
||||
Optional.of(TileCoord.ofXYZ(1, 2, 3)),
|
||||
new TileSchemeEncoding(tileScheme, tilesDir).decoder().apply(tilePath)
|
||||
);
|
||||
} else {
|
||||
assertEquals(
|
||||
Optional.empty(),
|
||||
new TileSchemeEncoding(tileScheme, tilesDir).decoder().apply(tilePath)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@CsvSource(textBlock = """
|
||||
{z}/{x}/{y}.pbf, 3
|
||||
{x}/{y}/{z}.pbf, 3
|
||||
{x}-{y}-{z}.pbf, 1
|
||||
{x}/a/{y}/b{z}.pbf, 4
|
||||
{z}/{x}/{y}.pbf.gz, 3
|
||||
{z}/{xs}/{ys}.pbf, 5
|
||||
{z}/{x}/{ys}.pbf, 4
|
||||
{z}/{xs}/{y}.pbf, 4
|
||||
"""
|
||||
)
|
||||
void testSearchDepth(String tileScheme, int searchDepth, @TempDir Path tempDir) {
|
||||
final Path tilesDir = tempDir.resolve("tiles");
|
||||
assertEquals(
|
||||
searchDepth,
|
||||
new TileSchemeEncoding(tileScheme, tilesDir).searchDepth()
|
||||
);
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@CsvSource(textBlock = """
|
||||
1/2/3.pbf
|
||||
{x}/{y}.pbf
|
||||
{z}/{y}.pbf
|
||||
{z}/{x}
|
||||
{z}/{x}/1.pbf
|
||||
{z}/{x}/{y}/{xs}.pbf
|
||||
{z}/{x}/{y}/{ys}.pbf
|
||||
{z}/{z}/{x}/{y}.pbf
|
||||
{x}/{z}/{x}/{y}.pbf
|
||||
{y}/{z}/{x}/{y}.pbf
|
||||
{xs}/{z}/{xs}/{ys}.pbf
|
||||
{ys}/{z}/{xs}/{ys}.pbf
|
||||
{x}/\\Q/{y}/b{z}.pbf
|
||||
{x}/\\E/{y}/b{z}.pbf
|
||||
"""
|
||||
)
|
||||
void testInvalidSchemes(String tileScheme, @TempDir Path tempDir) {
|
||||
final Path tilesDir = tempDir.resolve("tiles");
|
||||
assertThrows(Exception.class, () -> new TileSchemeEncoding(tileScheme, tilesDir));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testInvalidAbsoluteTileScheme(@TempDir Path tempDir) {
|
||||
final Path tilesDir = tempDir.resolve("tiles");
|
||||
final String tileSchemeAbsolute = tilesDir.resolve(Paths.get("{z}", "{x}", "{y}.pbf")).toAbsolutePath().toString();
|
||||
assertThrows(Exception.class, () -> new TileSchemeEncoding(tileSchemeAbsolute, tilesDir));
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@CsvSource({
|
||||
"{z}/{x}/{y}.pbf, TMS",
|
||||
"{z}/{xs}/{ys}.pbf, TMS",
|
||||
// given there is no (suitable) other tile order yet - use TMS here as wel
|
||||
"{x}/{y}/{z}.pbf, TMS"
|
||||
})
|
||||
void testPreferredTileOrder(String tileScheme, TileOrder tileOrder, @TempDir Path tempDir) {
|
||||
final Path tilesDir = tempDir.resolve("tiles");
|
||||
assertEquals(
|
||||
tileOrder,
|
||||
new TileSchemeEncoding(tileScheme, tilesDir).preferredTileOrder()
|
||||
|
||||
);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,260 @@
|
|||
package com.onthegomap.planetiler.files;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.*;
|
||||
|
||||
import com.onthegomap.planetiler.TestUtils;
|
||||
import com.onthegomap.planetiler.archive.TileEncodingResult;
|
||||
import com.onthegomap.planetiler.config.Arguments;
|
||||
import com.onthegomap.planetiler.geo.TileCoord;
|
||||
import com.onthegomap.planetiler.geo.TileOrder;
|
||||
import java.io.IOException;
|
||||
import java.io.UncheckedIOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.OptionalLong;
|
||||
import java.util.stream.Stream;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.io.TempDir;
|
||||
import org.junit.jupiter.params.ParameterizedTest;
|
||||
import org.junit.jupiter.params.provider.CsvSource;
|
||||
|
||||
class WriteableFilesArchiveTest {
|
||||
|
||||
@Test
|
||||
void testWrite(@TempDir Path tempDir) throws IOException {
|
||||
final Path tilesDir = tempDir.resolve("tiles");
|
||||
try (var archive = WriteableFilesArchive.newWriter(tilesDir, Arguments.of(), false)) {
|
||||
archive.initialize();
|
||||
try (var tileWriter = archive.newTileWriter()) {
|
||||
tileWriter.write(new TileEncodingResult(TileCoord.ofXYZ(0, 0, 0), new byte[]{0}, OptionalLong.empty()));
|
||||
tileWriter.write(new TileEncodingResult(TileCoord.ofXYZ(1, 2, 3), new byte[]{1}, OptionalLong.of(1)));
|
||||
tileWriter.write(new TileEncodingResult(TileCoord.ofXYZ(1, 3, 3), new byte[]{2}, OptionalLong.of(2)));
|
||||
tileWriter.write(new TileEncodingResult(TileCoord.ofXYZ(1, 3, 4), new byte[]{3}, OptionalLong.of(3)));
|
||||
}
|
||||
archive.finish(TestUtils.MAX_METADATA_DESERIALIZED);
|
||||
}
|
||||
|
||||
try (Stream<Path> s = Files.find(tilesDir, 100, (p, attrs) -> attrs.isRegularFile())) {
|
||||
final List<Path> filesInDir = s.sorted().toList();
|
||||
assertEquals(
|
||||
List.of(
|
||||
Paths.get("0", "0", "0.pbf"),
|
||||
Paths.get("3", "1", "2.pbf"),
|
||||
Paths.get("3", "1", "3.pbf"),
|
||||
Paths.get("4", "1", "3.pbf"),
|
||||
Paths.get("metadata.json")
|
||||
),
|
||||
filesInDir.stream().map(tilesDir::relativize).toList()
|
||||
);
|
||||
assertArrayEquals(new byte[]{0}, Files.readAllBytes(filesInDir.get(0)));
|
||||
assertArrayEquals(new byte[]{1}, Files.readAllBytes(filesInDir.get(1)));
|
||||
assertArrayEquals(new byte[]{2}, Files.readAllBytes(filesInDir.get(2)));
|
||||
assertArrayEquals(new byte[]{3}, Files.readAllBytes(filesInDir.get(3)));
|
||||
TestUtils.assertSameJson(
|
||||
TestUtils.MAX_METADATA_SERIALIZED,
|
||||
Files.readString(filesInDir.get(4))
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@CsvSource(textBlock = """
|
||||
{z}/{x}/{y}.pbf, 3/1/2.pbf
|
||||
{x}/{y}/{z}.pbf, 1/2/3.pbf
|
||||
{x}-{y}-{z}.pbf, 1-2-3.pbf
|
||||
{x}/a/{y}/b{z}.pbf, 1/a/2/b3.pbf
|
||||
{z}/{x}/{y}.pbf.gz, 3/1/2.pbf.gz
|
||||
{z}/{xs}/{ys}.pbf, 3/000/001/000/002.pbf
|
||||
{z}/{x}/{ys}.pbf, 3/1/000/002.pbf
|
||||
{z}/{xs}/{y}.pbf, 3/000/001/2.pbf
|
||||
"""
|
||||
)
|
||||
void testWriteCustomScheme(String tileScheme, Path expectedFile, @TempDir Path tempDir) throws IOException {
|
||||
final Path tilesDir = tempDir.resolve("tiles");
|
||||
expectedFile = tilesDir.resolve(expectedFile);
|
||||
final Arguments options = Arguments.of(Map.of(FilesArchiveUtils.OPTION_TILE_SCHEME, tileScheme));
|
||||
try (var archive = WriteableFilesArchive.newWriter(tilesDir, options, false)) {
|
||||
try (var tileWriter = archive.newTileWriter()) {
|
||||
tileWriter.write(new TileEncodingResult(TileCoord.ofXYZ(1, 2, 3), new byte[]{1}, OptionalLong.empty()));
|
||||
}
|
||||
}
|
||||
assertTrue(Files.exists(expectedFile));
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@CsvSource(textBlock = """
|
||||
{z}/{x}/{y}.pbf , , 3/1/2.pbf
|
||||
tiles/{z}/{x}/{y}.pbf , tiles, tiles/3/1/2.pbf
|
||||
tiles/z{z}/{x}/{y}.pbf , tiles, tiles/z3/1/2.pbf
|
||||
z{z}/x{x}/y{y}.pbf , , z3/x1/y2.pbf
|
||||
tiles/tile-{z}-{x}-{y}.pbf, tiles, tiles/tile-3-1-2.pbf
|
||||
"""
|
||||
)
|
||||
void testTileSchemeFromBasePath(Path shortcutBasePath, Path actualBasePath, Path tileFile, @TempDir Path tempDir)
|
||||
throws IOException {
|
||||
final Path testBase = tempDir.resolve("tiles");
|
||||
|
||||
shortcutBasePath = testBase.resolve(shortcutBasePath);
|
||||
actualBasePath = testBase.resolve(Objects.requireNonNullElse(actualBasePath, Paths.get("")));
|
||||
tileFile = testBase.resolve(tileFile);
|
||||
|
||||
try (var archive = WriteableFilesArchive.newWriter(shortcutBasePath, Arguments.of(), false)) {
|
||||
try (var tileWriter = archive.newTileWriter()) {
|
||||
tileWriter.write(new TileEncodingResult(TileCoord.ofXYZ(1, 2, 3), new byte[]{1}, OptionalLong.empty()));
|
||||
}
|
||||
archive.finish(TestUtils.MAX_METADATA_DESERIALIZED);
|
||||
}
|
||||
|
||||
assertTrue(Files.exists(tileFile));
|
||||
assertTrue(Files.exists(actualBasePath.resolve("metadata.json")));
|
||||
}
|
||||
|
||||
private void testMetadataWrite(Arguments options, Path archiveOutput, Path metadataTilesDir) throws IOException {
|
||||
try (var archive = WriteableFilesArchive.newWriter(archiveOutput, options, false)) {
|
||||
archive.initialize();
|
||||
archive.finish(TestUtils.MAX_METADATA_DESERIALIZED);
|
||||
}
|
||||
|
||||
assertTrue(Files.exists(metadataTilesDir));
|
||||
TestUtils.assertSameJson(
|
||||
TestUtils.MAX_METADATA_SERIALIZED,
|
||||
Files.readString(metadataTilesDir)
|
||||
);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMetadataWriteDefault(@TempDir Path tempDir) throws IOException {
|
||||
final Path tilesDir = tempDir.resolve("tiles");
|
||||
testMetadataWrite(Arguments.of(), tilesDir, tilesDir.resolve("metadata.json"));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMetadataWriteRelative(@TempDir Path tempDir) throws IOException {
|
||||
final Path tilesDir = tempDir.resolve("tiles");
|
||||
final Arguments options = Arguments.of(Map.of(FilesArchiveUtils.OPTION_METADATA_PATH, "x.y"));
|
||||
testMetadataWrite(options, tilesDir, tilesDir.resolve("x.y"));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMetadataWriteAbsolute(@TempDir Path tempDir) throws IOException {
|
||||
final Path tilesDir = tempDir.resolve("tiles");
|
||||
final Path p = Files.createDirectory(tempDir.resolve("abs")).toAbsolutePath().resolve("abc.json");
|
||||
final Arguments options = Arguments.of(Map.of(FilesArchiveUtils.OPTION_METADATA_PATH, p.toString()));
|
||||
testMetadataWrite(options, tilesDir, p);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMetadataWriteNone(@TempDir Path tempDir) throws IOException {
|
||||
final Path tilesDir = tempDir.resolve("tiles");
|
||||
final Arguments options = Arguments.of(Map.of(FilesArchiveUtils.OPTION_METADATA_PATH, "none"));
|
||||
try (var archive = WriteableFilesArchive.newWriter(tilesDir, options, false)) {
|
||||
archive.initialize();
|
||||
archive.finish(TestUtils.MAX_METADATA_DESERIALIZED);
|
||||
}
|
||||
try (Stream<Path> ps = Files.find(tilesDir, 100, (p, a) -> a.isRegularFile())) {
|
||||
assertEquals(List.of(), ps.toList());
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMetadataFailsIfNotFile(@TempDir Path tempDir) throws IOException {
|
||||
final Path tilesDir = tempDir.resolve("tiles");
|
||||
final Arguments options = Arguments.of(Map.of(FilesArchiveUtils.OPTION_METADATA_PATH, tilesDir.toString()));
|
||||
try (var archive = WriteableFilesArchive.newWriter(tilesDir, options, false)) {
|
||||
fail();
|
||||
} catch (IllegalArgumentException e) {
|
||||
// expected
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMetadataOverwriteOff(@TempDir Path tempDir) throws IOException {
|
||||
final Path tilesDir = tempDir.resolve("tiles");
|
||||
Files.createDirectory(tilesDir);
|
||||
Files.writeString(tilesDir.resolve("metadata.json"), "something");
|
||||
try (var archive = WriteableFilesArchive.newWriter(tilesDir, Arguments.of(), false)) {
|
||||
fail();
|
||||
} catch (IllegalArgumentException e) {
|
||||
// expected
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMetadataOverwriteOn(@TempDir Path tempDir) throws IOException {
|
||||
final Path tilesDir = tempDir.resolve("tiles");
|
||||
final Path metadataPath = tilesDir.resolve("metadata.json");
|
||||
Files.createDirectory(tilesDir);
|
||||
Files.writeString(metadataPath, "something");
|
||||
try (var archive = WriteableFilesArchive.newWriter(tilesDir, Arguments.of(), true)) {
|
||||
archive.initialize();
|
||||
archive.finish(TestUtils.MAX_METADATA_DESERIALIZED);
|
||||
}
|
||||
TestUtils.assertSameJson(
|
||||
TestUtils.MAX_METADATA_SERIALIZED,
|
||||
Files.readString(metadataPath)
|
||||
);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testCreatesPathIfNotExists(@TempDir Path tempDir) throws IOException {
|
||||
final Path tilesDir = tempDir.resolve("tiles");
|
||||
try (var archive = WriteableFilesArchive.newWriter(tilesDir, Arguments.of(), false)) {
|
||||
try (var writer = archive.newTileWriter()) {
|
||||
writer.write(new TileEncodingResult(TileCoord.ofXYZ(0, 0, 0), new byte[]{0}, OptionalLong.empty()));
|
||||
}
|
||||
}
|
||||
assertTrue(Files.isRegularFile(tilesDir.resolve(Paths.get("0", "0", "0.pbf"))));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFailsIfBasePathIsNoDirectory(@TempDir Path tempDir) throws IOException {
|
||||
final Path tilesDir = tempDir.resolve("tiles");
|
||||
Files.createFile(tilesDir);
|
||||
final Arguments options = Arguments.of();
|
||||
assertThrows(
|
||||
IllegalArgumentException.class,
|
||||
() -> WriteableFilesArchive.newWriter(tilesDir, options, false)
|
||||
);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFailsIfTileExistsAsDir(@TempDir Path tempDir) throws IOException {
|
||||
final Path tilesDir = tempDir.resolve("tiles");
|
||||
final Path tileAsDirPath = tilesDir.resolve(Paths.get("0", "0", "0.pbf"));
|
||||
Files.createDirectories(tileAsDirPath);
|
||||
try (var archive = WriteableFilesArchive.newWriter(tilesDir, Arguments.of(), false)) {
|
||||
try (var writer = archive.newTileWriter()) {
|
||||
final var r = new TileEncodingResult(TileCoord.ofXYZ(0, 0, 0), new byte[]{0}, OptionalLong.empty());
|
||||
assertThrows(UncheckedIOException.class, () -> writer.write(r));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFailsIfDirExistsAsFile(@TempDir Path tempDir) throws IOException {
|
||||
final Path tilesDir = tempDir.resolve("tiles");
|
||||
Files.createDirectories(tilesDir);
|
||||
Files.createFile(tilesDir.resolve("0"));
|
||||
try (var archive = WriteableFilesArchive.newWriter(tilesDir, Arguments.of(), false)) {
|
||||
try (var writer = archive.newTileWriter()) {
|
||||
final var r = new TileEncodingResult(TileCoord.ofXYZ(0, 0, 0), new byte[]{0}, OptionalLong.empty());
|
||||
assertThrows(IllegalStateException.class, () -> writer.write(r));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSettings(@TempDir Path tempDir) throws IOException {
|
||||
final Path tilesDir = tempDir.resolve("tiles");
|
||||
Files.createDirectories(tilesDir);
|
||||
try (var archive = WriteableFilesArchive.newWriter(tilesDir, Arguments.of(), false)) {
|
||||
assertFalse(archive.deduplicates());
|
||||
assertEquals(TileOrder.TMS, archive.tileOrder());
|
||||
|
||||
}
|
||||
}
|
||||
}
|
|
@ -27,7 +27,7 @@ import org.junit.jupiter.api.Test;
|
|||
import org.junit.jupiter.api.io.TempDir;
|
||||
import org.junit.jupiter.params.ParameterizedTest;
|
||||
import org.junit.jupiter.params.provider.ValueSource;
|
||||
import org.locationtech.jts.geom.CoordinateXY;
|
||||
import org.locationtech.jts.geom.Coordinate;
|
||||
import org.locationtech.jts.geom.Envelope;
|
||||
|
||||
class MbtilesTest {
|
||||
|
@ -147,21 +147,10 @@ class MbtilesTest {
|
|||
|
||||
@Test
|
||||
void testRoundTripMetadata() throws IOException {
|
||||
roundTripMetadata(new TileArchiveMetadata(
|
||||
"MyName",
|
||||
"MyDescription",
|
||||
"MyAttribution",
|
||||
"MyVersion",
|
||||
"baselayer",
|
||||
TileArchiveMetadata.MVT_FORMAT,
|
||||
new Envelope(1, 2, 3, 4),
|
||||
new CoordinateXY(5, 6),
|
||||
7d,
|
||||
8,
|
||||
9,
|
||||
List.of(new LayerAttrStats.VectorLayer("MyLayer", Map.of())),
|
||||
Map.of("other key", "other value"),
|
||||
TileCompression.GZIP
|
||||
roundTripMetadata(metadataWithJson(
|
||||
TileArchiveMetadata.TileArchiveMetadataJson.create(
|
||||
List.of(new LayerAttrStats.VectorLayer("MyLayer", Map.of()))
|
||||
)
|
||||
));
|
||||
}
|
||||
|
||||
|
@ -176,11 +165,13 @@ class MbtilesTest {
|
|||
"baselayer",
|
||||
TileArchiveMetadata.MVT_FORMAT,
|
||||
new Envelope(1, 2, 3, 4),
|
||||
new CoordinateXY(5, 6),
|
||||
7d,
|
||||
new Coordinate(5, 6, 7d),
|
||||
8,
|
||||
9,
|
||||
List.of(new LayerAttrStats.VectorLayer("MyLayer", Map.of())),
|
||||
TileArchiveMetadata.TileArchiveMetadataJson.create(
|
||||
List.of(new LayerAttrStats.VectorLayer("MyLayer", Map.of()))
|
||||
|
||||
),
|
||||
Map.of("other key", "other value"),
|
||||
null
|
||||
);
|
||||
|
@ -193,11 +184,12 @@ class MbtilesTest {
|
|||
"baselayer",
|
||||
TileArchiveMetadata.MVT_FORMAT,
|
||||
new Envelope(1, 2, 3, 4),
|
||||
new CoordinateXY(5, 6),
|
||||
7d,
|
||||
new Coordinate(5, 6, 7d),
|
||||
8,
|
||||
9,
|
||||
List.of(new LayerAttrStats.VectorLayer("MyLayer", Map.of())),
|
||||
TileArchiveMetadata.TileArchiveMetadataJson.create(
|
||||
List.of(new LayerAttrStats.VectorLayer("MyLayer", Map.of()))
|
||||
),
|
||||
Map.of("other key", "other value"),
|
||||
TileCompression.GZIP
|
||||
);
|
||||
|
@ -208,7 +200,7 @@ class MbtilesTest {
|
|||
@Test
|
||||
void testRoundTripMinimalMetadata() throws IOException {
|
||||
var empty =
|
||||
new TileArchiveMetadata(null, null, null, null, null, null, null, null, null, null, null, null, Map.of(),
|
||||
new TileArchiveMetadata(null, null, null, null, null, null, null, null, null, null, null, Map.of(),
|
||||
TileCompression.GZIP);
|
||||
roundTripMetadata(empty);
|
||||
try (Mbtiles db = Mbtiles.newInMemoryDatabase()) {
|
||||
|
@ -231,10 +223,30 @@ class MbtilesTest {
|
|||
}
|
||||
}
|
||||
|
||||
private void testMetadataJson(Mbtiles.MetadataJson object, String expected) throws IOException {
|
||||
private static TileArchiveMetadata metadataWithJson(TileArchiveMetadata.TileArchiveMetadataJson metadataJson) {
|
||||
return new TileArchiveMetadata(
|
||||
"MyName",
|
||||
"MyDescription",
|
||||
"MyAttribution",
|
||||
"MyVersion",
|
||||
"baselayer",
|
||||
TileArchiveMetadata.MVT_FORMAT,
|
||||
new Envelope(1, 2, 3, 4),
|
||||
new Coordinate(5, 6, 7d),
|
||||
8,
|
||||
9,
|
||||
metadataJson,
|
||||
Map.of("other key", "other value"),
|
||||
TileCompression.GZIP
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
private void testMetadataJson(TileArchiveMetadata.TileArchiveMetadataJson metadataJson, String expected)
|
||||
throws IOException {
|
||||
try (Mbtiles db = Mbtiles.newInMemoryDatabase()) {
|
||||
var metadata = db.createTablesWithoutIndexes().metadataTable();
|
||||
metadata.setJson(object);
|
||||
metadata.set(metadataWithJson(metadataJson));
|
||||
var actual = metadata.getAll().get("json");
|
||||
assertSameJson(expected, actual);
|
||||
}
|
||||
|
@ -242,7 +254,7 @@ class MbtilesTest {
|
|||
|
||||
@Test
|
||||
void testMetadataJsonNoLayers() throws IOException {
|
||||
testMetadataJson(new Mbtiles.MetadataJson(), """
|
||||
testMetadataJson(TileArchiveMetadata.TileArchiveMetadataJson.create(List.of()), """
|
||||
{
|
||||
"vector_layers": []
|
||||
}
|
||||
|
@ -251,20 +263,22 @@ class MbtilesTest {
|
|||
|
||||
@Test
|
||||
void testFullMetadataJson() throws IOException {
|
||||
testMetadataJson(new Mbtiles.MetadataJson(
|
||||
new LayerAttrStats.VectorLayer(
|
||||
"full",
|
||||
Map.of(
|
||||
"NUMBER_FIELD", LayerAttrStats.FieldType.NUMBER,
|
||||
"STRING_FIELD", LayerAttrStats.FieldType.STRING,
|
||||
"boolean field", LayerAttrStats.FieldType.BOOLEAN
|
||||
testMetadataJson(new TileArchiveMetadata.TileArchiveMetadataJson(
|
||||
List.of(
|
||||
new LayerAttrStats.VectorLayer(
|
||||
"full",
|
||||
Map.of(
|
||||
"NUMBER_FIELD", LayerAttrStats.FieldType.NUMBER,
|
||||
"STRING_FIELD", LayerAttrStats.FieldType.STRING,
|
||||
"boolean field", LayerAttrStats.FieldType.BOOLEAN
|
||||
)
|
||||
).withDescription("full description")
|
||||
.withMinzoom(0)
|
||||
.withMaxzoom(5),
|
||||
new LayerAttrStats.VectorLayer(
|
||||
"partial",
|
||||
Map.of()
|
||||
)
|
||||
).withDescription("full description")
|
||||
.withMinzoom(0)
|
||||
.withMaxzoom(5),
|
||||
new LayerAttrStats.VectorLayer(
|
||||
"partial",
|
||||
Map.of()
|
||||
)
|
||||
), """
|
||||
{
|
||||
|
|
|
@ -28,7 +28,7 @@ import java.util.stream.Collectors;
|
|||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.params.ParameterizedTest;
|
||||
import org.junit.jupiter.params.provider.EnumSource;
|
||||
import org.locationtech.jts.geom.CoordinateXY;
|
||||
import org.locationtech.jts.geom.Coordinate;
|
||||
import org.locationtech.jts.geom.Envelope;
|
||||
|
||||
class PmtilesTest {
|
||||
|
@ -221,11 +221,12 @@ class PmtilesTest {
|
|||
"baselayer",
|
||||
TileArchiveMetadata.MVT_FORMAT,
|
||||
new Envelope(1.1, 2.2, 3.3, 4.4),
|
||||
new CoordinateXY(5.5, 6.6),
|
||||
7d,
|
||||
new Coordinate(5.5, 6.6, 7d),
|
||||
8,
|
||||
9,
|
||||
List.of(new LayerAttrStats.VectorLayer("MyLayer", Map.of())),
|
||||
TileArchiveMetadata.TileArchiveMetadataJson.create(
|
||||
List.of(new LayerAttrStats.VectorLayer("MyLayer", Map.of()))
|
||||
),
|
||||
Map.of("other key", "other value"),
|
||||
TileCompression.GZIP
|
||||
));
|
||||
|
@ -235,12 +236,11 @@ class PmtilesTest {
|
|||
@EnumSource(value = TileCompression.class, names = {"GZIP", "NONE"})
|
||||
void testRoundtripMetadataMinimal(TileCompression tileCompression) throws IOException {
|
||||
roundTripMetadata(
|
||||
new TileArchiveMetadata(null, null, null, null, null, null, null, null, null, null, null, null, Map.of(),
|
||||
new TileArchiveMetadata(null, null, null, null, null, null, null, null, null, null, null, Map.of(),
|
||||
tileCompression),
|
||||
new TileArchiveMetadata(null, null, null, null, null, null,
|
||||
new Envelope(-180, 180, -85.0511287, 85.0511287),
|
||||
new CoordinateXY(0, 0),
|
||||
0d,
|
||||
new Coordinate(0, 0, 0d),
|
||||
0,
|
||||
15,
|
||||
null,
|
||||
|
|
|
@ -23,7 +23,7 @@ class WriteableCsvArchiveTest {
|
|||
|
||||
private static final StreamArchiveConfig defaultConfig = new StreamArchiveConfig(false, Arguments.of());
|
||||
private static final TileArchiveMetadata defaultMetadata =
|
||||
new TileArchiveMetadata("start", null, null, null, null, null, null, null, null, null, null, null, null, null);
|
||||
new TileArchiveMetadata("start", null, null, null, null, null, null, null, null, null, null, null, null);
|
||||
|
||||
@ParameterizedTest
|
||||
@EnumSource(value = TileArchiveConfig.Format.class, names = {"CSV", "TSV"})
|
||||
|
|
|
@ -6,91 +6,25 @@ import static org.junit.jupiter.api.Assertions.assertFalse;
|
|||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import com.onthegomap.planetiler.archive.TileArchiveMetadata;
|
||||
import com.onthegomap.planetiler.archive.TileCompression;
|
||||
import com.onthegomap.planetiler.TestUtils;
|
||||
import com.onthegomap.planetiler.archive.TileEncodingResult;
|
||||
import com.onthegomap.planetiler.config.Arguments;
|
||||
import com.onthegomap.planetiler.geo.TileCoord;
|
||||
import com.onthegomap.planetiler.util.LayerAttrStats;
|
||||
import java.io.IOException;
|
||||
import java.io.UncheckedIOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.OptionalInt;
|
||||
import java.util.OptionalLong;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.io.TempDir;
|
||||
import org.locationtech.jts.geom.CoordinateXY;
|
||||
import org.locationtech.jts.geom.Envelope;
|
||||
|
||||
class WriteableJsonStreamArchiveTest {
|
||||
|
||||
private static final StreamArchiveConfig defaultConfig = new StreamArchiveConfig(false, Arguments.of());
|
||||
private static final TileArchiveMetadata MAX_METADATA_IN =
|
||||
new TileArchiveMetadata("name", "description", "attribution", "version", "type", "format", new Envelope(0, 1, 2, 3),
|
||||
new CoordinateXY(1.3, 3.7), 1.0, 2, 3,
|
||||
List.of(
|
||||
new LayerAttrStats.VectorLayer("vl0",
|
||||
ImmutableMap.of("1", LayerAttrStats.FieldType.BOOLEAN, "2", LayerAttrStats.FieldType.NUMBER, "3",
|
||||
LayerAttrStats.FieldType.STRING),
|
||||
Optional.of("description"), OptionalInt.of(1), OptionalInt.of(2)),
|
||||
new LayerAttrStats.VectorLayer("vl1",
|
||||
Map.of(),
|
||||
Optional.empty(), OptionalInt.empty(), OptionalInt.empty())
|
||||
),
|
||||
ImmutableMap.of("a", "b", "c", "d"),
|
||||
TileCompression.GZIP);
|
||||
private static final String MAX_METADATA_OUT = """
|
||||
{
|
||||
"name":"name",
|
||||
"description":"description",
|
||||
"attribution":"attribution",
|
||||
"version":"version",
|
||||
"type":"type",
|
||||
"format":"format",
|
||||
"zoom":1.0,
|
||||
"minzoom":2,
|
||||
"maxzoom":3,
|
||||
"compression":"gzip",
|
||||
"bounds":{
|
||||
"minX":0.0,
|
||||
"maxX":1.0,
|
||||
"minY":2.0,
|
||||
"maxY":3.0
|
||||
},
|
||||
"center":{
|
||||
"x":1.3,"y":3.7
|
||||
},
|
||||
"vectorLayers":[
|
||||
{
|
||||
"id":"vl0",
|
||||
"fields":{
|
||||
"1":"Boolean",
|
||||
"2":"Number",
|
||||
"3":"String"
|
||||
},
|
||||
"description":"description",
|
||||
"minzoom":1,
|
||||
"maxzoom":2
|
||||
},
|
||||
{
|
||||
"id":"vl1",
|
||||
"fields":{}
|
||||
}
|
||||
],
|
||||
"a":"b",
|
||||
"c":"d"
|
||||
}""".lines().map(String::trim).collect(Collectors.joining(""));
|
||||
|
||||
private static final TileArchiveMetadata MIN_METADATA_IN =
|
||||
new TileArchiveMetadata(null, null, null, null, null, null, null, null, null, null, null, null, null, null);
|
||||
private static final String MIN_METADATA_OUT = "{}";
|
||||
|
||||
@Test
|
||||
void testWriteToSingleFile(@TempDir Path tempDir) throws IOException {
|
||||
|
@ -103,7 +37,7 @@ class WriteableJsonStreamArchiveTest {
|
|||
tileWriter.write(new TileEncodingResult(TileCoord.ofXYZ(0, 0, 0), new byte[]{0}, OptionalLong.empty()));
|
||||
tileWriter.write(new TileEncodingResult(TileCoord.ofXYZ(1, 2, 3), new byte[]{1}, OptionalLong.of(1)));
|
||||
}
|
||||
archive.finish(MIN_METADATA_IN);
|
||||
archive.finish(TestUtils.MIN_METADATA_DESERIALIZED);
|
||||
}
|
||||
|
||||
assertEqualsDelimitedJson(
|
||||
|
@ -112,7 +46,7 @@ class WriteableJsonStreamArchiveTest {
|
|||
{"type":"tile","x":0,"y":0,"z":0,"encodedData":"AA=="}
|
||||
{"type":"tile","x":1,"y":2,"z":3,"encodedData":"AQ=="}
|
||||
{"type":"finish","metadata":%s}
|
||||
""".formatted(MIN_METADATA_OUT),
|
||||
""".formatted(TestUtils.MIN_METADATA_SERIALIZED),
|
||||
Files.readString(csvFile)
|
||||
);
|
||||
|
||||
|
@ -144,7 +78,7 @@ class WriteableJsonStreamArchiveTest {
|
|||
try (var tileWriter = archive.newTileWriter()) {
|
||||
tileWriter.write(tile4);
|
||||
}
|
||||
archive.finish(MAX_METADATA_IN);
|
||||
archive.finish(TestUtils.MAX_METADATA_DESERIALIZED);
|
||||
}
|
||||
|
||||
assertEqualsDelimitedJson(
|
||||
|
@ -153,7 +87,7 @@ class WriteableJsonStreamArchiveTest {
|
|||
{"type":"tile","x":11,"y":12,"z":1,"encodedData":"AA=="}
|
||||
{"type":"tile","x":21,"y":22,"z":2,"encodedData":"AQ=="}
|
||||
{"type":"finish","metadata":%s}
|
||||
""".formatted(MAX_METADATA_OUT),
|
||||
""".formatted(TestUtils.MAX_METADATA_SERIALIZED),
|
||||
Files.readString(csvFilePrimary)
|
||||
);
|
||||
|
||||
|
@ -203,7 +137,7 @@ class WriteableJsonStreamArchiveTest {
|
|||
{"type":"tile","x":0,"y":0,"z":0,"encodedData":"AA=="}
|
||||
{"type":"tile","x":1,"y":2,"z":3,"encodedData":"AQ=="}
|
||||
{"type":"finish","metadata":%s}
|
||||
""".formatted(MAX_METADATA_OUT)
|
||||
""".formatted(TestUtils.MAX_METADATA_SERIALIZED)
|
||||
.replace('\n', ' ');
|
||||
|
||||
testTileOptions(tempDir, config, expectedJson);
|
||||
|
@ -221,7 +155,7 @@ class WriteableJsonStreamArchiveTest {
|
|||
tileWriter.write(new TileEncodingResult(TileCoord.ofXYZ(0, 0, 0), new byte[]{0}, OptionalLong.empty()));
|
||||
tileWriter.write(new TileEncodingResult(TileCoord.ofXYZ(1, 2, 3), new byte[]{1}, OptionalLong.empty()));
|
||||
}
|
||||
archive.finish(MAX_METADATA_IN);
|
||||
archive.finish(TestUtils.MAX_METADATA_DESERIALIZED);
|
||||
}
|
||||
|
||||
assertEqualsDelimitedJson(expectedJson, Files.readString(csvFile));
|
||||
|
|
|
@ -23,7 +23,7 @@ import java.util.Set;
|
|||
import java.util.stream.Collectors;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.io.TempDir;
|
||||
import org.locationtech.jts.geom.CoordinateXY;
|
||||
import org.locationtech.jts.geom.Coordinate;
|
||||
import org.locationtech.jts.geom.Envelope;
|
||||
|
||||
class WriteableProtoStreamArchiveTest {
|
||||
|
@ -31,15 +31,17 @@ class WriteableProtoStreamArchiveTest {
|
|||
private static final StreamArchiveConfig defaultConfig = new StreamArchiveConfig(false, null);
|
||||
private static final TileArchiveMetadata maxMetadataIn =
|
||||
new TileArchiveMetadata("name", "description", "attribution", "version", "type", "format", new Envelope(0, 1, 2, 3),
|
||||
new CoordinateXY(1.3, 3.7), 1.0, 2, 3,
|
||||
List.of(
|
||||
new LayerAttrStats.VectorLayer("vl0",
|
||||
Map.of("1", LayerAttrStats.FieldType.BOOLEAN, "2", LayerAttrStats.FieldType.NUMBER, "3",
|
||||
LayerAttrStats.FieldType.STRING),
|
||||
Optional.of("description"), OptionalInt.of(1), OptionalInt.of(2)),
|
||||
new LayerAttrStats.VectorLayer("vl1",
|
||||
Map.of(),
|
||||
Optional.empty(), OptionalInt.empty(), OptionalInt.empty())
|
||||
new Coordinate(1.3, 3.7, 1.0), 2, 3,
|
||||
TileArchiveMetadata.TileArchiveMetadataJson.create(
|
||||
List.of(
|
||||
new LayerAttrStats.VectorLayer("vl0",
|
||||
Map.of("1", LayerAttrStats.FieldType.BOOLEAN, "2", LayerAttrStats.FieldType.NUMBER, "3",
|
||||
LayerAttrStats.FieldType.STRING),
|
||||
Optional.of("description"), OptionalInt.of(1), OptionalInt.of(2)),
|
||||
new LayerAttrStats.VectorLayer("vl1",
|
||||
Map.of(),
|
||||
Optional.empty(), OptionalInt.empty(), OptionalInt.empty())
|
||||
)
|
||||
),
|
||||
Map.of("a", "b", "c", "d"),
|
||||
TileCompression.GZIP);
|
||||
|
@ -47,8 +49,8 @@ class WriteableProtoStreamArchiveTest {
|
|||
.setName("name").setDescription("description").setAttribution("attribution").setVersion("version")
|
||||
.setType("type").setFormat("format")
|
||||
.setBounds(StreamArchiveProto.Envelope.newBuilder().setMinX(0).setMaxX(1).setMinY(2).setMaxY(3).build())
|
||||
.setCenter(StreamArchiveProto.CoordinateXY.newBuilder().setX(1.3).setY(3.7))
|
||||
.setZoom(1.0).setMinZoom(2).setMaxZoom(3)
|
||||
.setCenter(StreamArchiveProto.Coordinate.newBuilder().setX(1.3).setY(3.7).setZ(1.0))
|
||||
.setMinZoom(2).setMaxZoom(3)
|
||||
.addVectorLayers(
|
||||
StreamArchiveProto.VectorLayer.newBuilder()
|
||||
.setId("vl0").setDescription("description").setMinZoom(1).setMaxZoom(2)
|
||||
|
@ -63,7 +65,7 @@ class WriteableProtoStreamArchiveTest {
|
|||
.build();
|
||||
|
||||
private static final TileArchiveMetadata minMetadataIn =
|
||||
new TileArchiveMetadata(null, null, null, null, null, null, null, null, null, null, null, null, null,
|
||||
new TileArchiveMetadata(null, null, null, null, null, null, null, null, null, null, null, null,
|
||||
TileCompression.NONE);
|
||||
private static final StreamArchiveProto.Metadata minMetadataOut = StreamArchiveProto.Metadata.newBuilder()
|
||||
.setTileCompression(StreamArchiveProto.TileCompression.TILE_COMPRESSION_NONE)
|
||||
|
|
Ładowanie…
Reference in New Issue