java downloader app

pull/1/head
Mike Barry 2021-08-04 21:22:20 -04:00
parent 5e4129225c
commit 9e00f62c52
31 changed files with 422 additions and 107 deletions

.gitignore vendored
View file

@ -7,6 +7,7 @@ target/
*.iml
!.idea/codeStyles
!.idea/vcs.xml
!.idea/saveactions_settings.xml
TODO

View file

@ -0,0 +1,13 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="SaveActionSettings">
    <option name="actions">
      <set>
        <option value="activate" />
        <option value="organizeImports" />
        <option value="reformat" />
      </set>
    </option>
    <option name="configurationPath" value="" />
  </component>
</project>

View file

@ -133,7 +133,7 @@ public class Arguments {
return new PrometheusStats(prometheus, job, interval);
} else {
LOGGER.info("Using in-memory stats");
- return new Stats.InMemory();
+ return Stats.inMemory();
}
}

View file

@ -0,0 +1,118 @@
package com.onthegomap.flatmap;

import static com.google.common.net.HttpHeaders.CONTENT_LENGTH;
import static com.google.common.net.HttpHeaders.USER_AGENT;

import com.onthegomap.flatmap.monitoring.ProgressLoggers;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.file.Path;
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A utility for downloading files to disk over HTTP.
 */
public class Download {

  private static final Logger LOGGER = LoggerFactory.getLogger(Download.class);

  private final CommonParams config;
  private final List<ToDownload> toDownloadList = new ArrayList<>();
  private final HttpClient client = HttpClient.newBuilder().followRedirects(HttpClient.Redirect.NORMAL).build();

  private Download(CommonParams config) {
    this.config = config;
  }

  private static record ToDownload(String id, String url, Path output, CompletableFuture<Long> size) {

    ToDownload(String id, String url, Path output) {
      this(id, url, output, new CompletableFuture<>());
    }
  }

  public static Download create(CommonParams params) {
    return new Download(params);
  }

  public Download add(String id, String url, Path output) {
    toDownloadList.add(new ToDownload(id, url, output));
    return this;
  }

  public void start() {
    var downloads = CompletableFuture
      .allOf(toDownloadList.stream().map(this::downloadIfNecessary).toArray(CompletableFuture[]::new));

    ProgressLoggers loggers = new ProgressLoggers("download");

    for (var toDownload : toDownloadList) {
      try {
        long size = toDownload.size.get();
        loggers.addRatePercentCounter(toDownload.id, size, () -> FileUtils.fileSize(toDownload.output));
      } catch (InterruptedException | ExecutionException e) {
        throw new IllegalStateException(e);
      }
    }

    loggers.awaitAndLog(downloads, config.logInterval());
  }

  private CompletableFuture<?> downloadIfNecessary(ToDownload toDownload) {
    long existingSize = FileUtils.size(toDownload.output);
    return httpSize(toDownload).thenCompose(size -> {
      toDownload.size.complete(size);
      if (size == existingSize) {
        LOGGER.info("Skipping " + toDownload.id + ": " + toDownload.output + " already up-to-date");
        return CompletableFuture.completedFuture(null);
      } else {
        LOGGER.info("Downloading " + toDownload.url + " to " + toDownload.output);
        FileUtils.delete(toDownload.output);
        FileUtils.createParentDirectories(toDownload.output);
        return httpDownload(toDownload).whenComplete((result, error) -> {
          if (result != null) {
            LOGGER.info("Finished downloading " + toDownload.url + " to " + toDownload.output);
          } else if (error != null) {
            LOGGER.error("Error downloading " + toDownload.url + " to " + toDownload.output, error);
          }
        });
      }
    });
  }

  private CompletableFuture<Long> httpSize(ToDownload toDownload) {
    return client
      .sendAsync(newHttpRequest(toDownload.url).method("HEAD", HttpRequest.BodyPublishers.noBody()).build(),
        responseInfo -> {
          assertOK(responseInfo);
          long contentLength = responseInfo.headers().firstValueAsLong(CONTENT_LENGTH).orElseThrow();
          return HttpResponse.BodyHandlers.replacing(contentLength).apply(responseInfo);
        }).thenApply(HttpResponse::body);
  }

  private CompletableFuture<?> httpDownload(ToDownload toDownload) {
    return client.sendAsync(newHttpRequest(toDownload.url).GET().build(), responseInfo -> {
      assertOK(responseInfo);
      return HttpResponse.BodyHandlers.ofFile(toDownload.output).apply(responseInfo);
    });
  }

  private static HttpRequest.Builder newHttpRequest(String url) {
    return HttpRequest.newBuilder(URI.create(url))
      .timeout(Duration.ofSeconds(30))
      .header(USER_AGENT, "Flatmap downloader (https://github.com/onthegomap/flatmap)");
  }

  private static void assertOK(HttpResponse.ResponseInfo responseInfo) {
    if (responseInfo.statusCode() != 200) {
      throw new IllegalStateException("Bad response: " + responseInfo.statusCode());
    }
  }
}
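
For illustration, a minimal usage sketch of the class above; the id, URL, and output path are hypothetical, and CommonParams.defaults() is borrowed from the tests in this commit (the URL could equally come from the Geofabrik helper added below):

    package com.onthegomap.flatmap;

    import java.nio.file.Path;

    public class DownloadExample {

      public static void main(String[] args) {
        // Queue one extract and block until it finishes: start() sizes each file with a HEAD
        // request, skips files already the right size on disk, then downloads the rest while
        // logging progress.
        Download.create(CommonParams.defaults())
          .add("monaco", "https://download.geofabrik.de/europe/monaco-latest.osm.pbf",
            Path.of("data", "sources", "monaco.osm.pbf"))
          .start();
      }
    }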

View file

@ -79,4 +79,16 @@ public class FileUtils {
deleteFile(path);
}
}
  public static void createParentDirectories(Path path) {
    try {
      if (Files.isDirectory(path)) {
        Files.createDirectories(path);
      } else {
        Files.createDirectories(path.getParent());
      }
    } catch (IOException e) {
      throw new IllegalStateException("Unable to create parent directories " + path, e);
    }
  }
}

View file

@ -9,11 +9,11 @@ import java.util.TreeMap;
public class Format {
private static final NavigableMap<Long, String> STORAGE_SUFFIXES = new TreeMap<>(Map.ofEntries(
- Map.entry(1_000L, "kB"),
- Map.entry(1_000_000L, "MB"),
- Map.entry(1_000_000_000L, "GB"),
- Map.entry(1_000_000_000_000L, "TB"),
- Map.entry(1_000_000_000_000_000L, "PB")
+ Map.entry(1_000L, "k"),
+ Map.entry(1_000_000L, "M"),
+ Map.entry(1_000_000_000L, "G"),
+ Map.entry(1_000_000_000_000L, "T"),
+ Map.entry(1_000_000_000_000_000L, "P")
));
private static final NavigableMap<Long, String> NUMERIC_SUFFIXES = new TreeMap<>(Map.ofEntries(
Map.entry(1_000L, "k"),
@ -61,7 +61,7 @@ public class Format {
long value = num.longValue();
double doubleValue = num.doubleValue();
if (value < 0) {
- return "-" + format(-value, pad, suffixes);
+ return padLeft("-", pad ? 4 : 0);
} else if (doubleValue > 0 && doubleValue < 1) {
return padLeft("<1", pad ? 4 : 0);
} else if (value < 1000) {

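For reference, a sketch of what the suffix change above means for callers; the expected values are taken from the FormatTest changes later in this commit:

    // Storage sizes now use the same bare suffixes as numeric formatting, and negative
    // values collapse to "-" instead of a formatted negative number.
    Format.formatStorage(5.5e9, false);      // "5.5G" (was "5.5GB")
    Format.formatNumeric(-9_999_999, false); // "-"    (was "-9.9M")
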
View file

@ -0,0 +1,76 @@
package com.onthegomap.flatmap;

import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * A utility for searching https://download.geofabrik.de/ for a .osm.pbf download URL.
 */
public class Geofabrik {

  private static final ObjectMapper objectMapper = new ObjectMapper()
    .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

  private static record Properties(String id, String parent, String name, Map<String, String> urls) {}

  private static record Feature(Properties properties) {}

  private static record Index(List<Feature> features) {}

  private static Set<String> tokens(String in) {
    return Stream.of(in.toLowerCase(Locale.ROOT).split("[^a-z]+")).collect(Collectors.toSet());
  }

  static String getDownloadUrl(String searchQuery, InputStream indexContent) throws IOException {
    Set<String> searchTokens = tokens(searchQuery);
    Index index = objectMapper.readValue(indexContent, Index.class);
    List<Properties> approx = new ArrayList<>();
    List<Properties> exact = new ArrayList<>();
    for (var feature : index.features) {
      Properties properties = feature.properties;
      if (properties.urls.containsKey("pbf")) {
        if (tokens(properties.id).equals(searchTokens) ||
          tokens(properties.name).equals(searchTokens)) {
          exact.add(properties);
        } else if (tokens(properties.name).containsAll(searchTokens)) {
          approx.add(properties);
        }
      }
    }
    if (exact.size() > 1) {
      throw new IllegalArgumentException(
        "Multiple exact matches for '" + searchQuery + "': " + exact.stream().map(d -> d.id).collect(
          Collectors.joining(", ")));
    } else if (exact.size() == 1) {
      return exact.get(0).urls.get("pbf");
    } else {
      if (approx.size() > 1) {
        throw new IllegalArgumentException(
          "Multiple approximate matches for '" + searchQuery + "': " + approx.stream().map(d -> d.id).collect(
            Collectors.joining(", ")));
      } else if (approx.size() == 1) {
        return approx.get(0).urls.get("pbf");
      } else {
        throw new IllegalArgumentException("No matches for '" + searchQuery + "'");
      }
    }
  }

  public static String getDownloadUrl(String searchQuery) {
    try (InputStream inputStream = new URL("https://download.geofabrik.de/index-v1-nogeom.json").openStream()) {
      return getDownloadUrl(searchQuery, inputStream);
    } catch (IOException e) {
      throw new IllegalStateException(e);
    }
  }
}
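
A minimal usage sketch of the resolver above; the search term is hypothetical and the printed URL only illustrates the index's URL pattern:

    package com.onthegomap.flatmap;

    public class GeofabrikExample {

      public static void main(String[] args) {
        // Fetches https://download.geofabrik.de/index-v1-nogeom.json and searches it: exact
        // id/name matches win, otherwise a single region whose name contains all search tokens
        // is used, and ambiguous or missing matches throw IllegalArgumentException.
        String url = Geofabrik.getDownloadUrl("monaco");
        System.out.println(url); // e.g. https://download.geofabrik.de/europe/monaco-latest.osm.pbf
      }
    }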

View file

@ -1,5 +1,9 @@
package com.onthegomap.flatmap;
import static com.google.common.net.HttpHeaders.ACCEPT;
import static com.google.common.net.HttpHeaders.CONTENT_TYPE;
import static com.google.common.net.HttpHeaders.USER_AGENT;
import com.carrotsearch.hppc.LongHashSet;
import com.carrotsearch.hppc.LongObjectHashMap;
import com.carrotsearch.hppc.LongObjectMap;
@ -273,9 +277,9 @@ public class Wikidata {
HttpRequest request = HttpRequest.newBuilder(URI.create("https://query.wikidata.org/bigdata/namespace/wdq/sparql"))
.timeout(Duration.ofSeconds(30))
- .header("User-Agent", "OpenMapTiles OSM name resolver (https://github.com/openmaptiles/openmaptiles)")
- .header("Accept", "application/sparql-results+json")
- .header("Content-Type", "application/sparql-query")
+ .header(USER_AGENT, "Flatmap OSM name resolver (https://github.com/onthegomap/flatmap)")
+ .header(ACCEPT, "application/sparql-results+json")
+ .header(CONTENT_TYPE, "application/sparql-query")
.POST(HttpRequest.BodyPublishers.ofString(query, StandardCharsets.UTF_8))
.build();
InputStream response = client.send(request);

View file

@ -75,7 +75,7 @@ public interface LongLongMap extends Closeable {
.addRatePercentCounter("entries", entries, () -> counter.count)
.addProcessStats();
AtomicReference<String> writeRate = new AtomicReference<>();
- new Worker("writer", new Stats.InMemory(), 1, () -> {
+ new Worker("writer", Stats.inMemory(), 1, () -> {
long start = System.nanoTime();
for (long i = 0; i < entries; i++) {
map.put(i + 1L, i + 2L);

View file

@ -17,8 +17,12 @@ import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.TreeMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.BiFunction;
import java.util.function.DoubleFunction;
import java.util.function.LongSupplier;
import java.util.function.Supplier;
@ -56,6 +60,9 @@ public class ProgressLoggers {
long valueNow = getValue.getAsLong();
double timeDiff = (now - lastTime.get()) * 1d / (1d * TimeUnit.SECONDS.toNanos(1));
double valueDiff = valueNow - last.get();
if (valueDiff < 0) {
valueDiff = valueNow;
}
last.set(valueNow);
lastTime.set(now);
return ANSI_GREEN + "[ " + formatNumeric(valueNow, true) + " " + formatNumeric(valueDiff / timeDiff, true)
@ -73,6 +80,15 @@ public class ProgressLoggers {
}
public ProgressLoggers addRatePercentCounter(String name, long total, LongSupplier getValue) {
return addRatePercentCounter(name, total, getValue, Format::formatNumeric);
}
public ProgressLoggers addRatePercentBytesCounter(String name, long total, LongSupplier getValue) {
return addRatePercentCounter(name, total, getValue, Format::formatStorage);
}
public ProgressLoggers addRatePercentCounter(String name, long total, LongSupplier getValue,
BiFunction<Number, Boolean, String> format) {
AtomicLong last = new AtomicLong(getValue.getAsLong());
AtomicLong lastTime = new AtomicLong(System.nanoTime());
loggers.add(new ProgressLogger(name, () -> {
@ -80,10 +96,13 @@ public class ProgressLoggers {
long valueNow = getValue.getAsLong();
double timeDiff = (now - lastTime.get()) * 1d / (1d * TimeUnit.SECONDS.toNanos(1));
double valueDiff = valueNow - last.get();
if (valueDiff < 0) {
valueDiff = valueNow;
}
last.set(valueNow);
lastTime.set(now);
- return ANSI_GREEN + "[ " + formatNumeric(valueNow, true) + " " + padLeft(formatPercent(1f * valueNow / total), 4)
- + " " + formatNumeric(valueDiff / timeDiff, true) + "/s ]" + ANSI_RESET;
+ return ANSI_GREEN + "[ " + format.apply(valueNow, true) + " " + padLeft(formatPercent(1f * valueNow / total), 4)
+ + " " + format.apply(valueDiff / timeDiff, true) + "/s ]" + ANSI_RESET;
}));
return this;
}
@ -247,6 +266,23 @@ public class ProgressLoggers {
return add("\n ");
}
public void awaitAndLog(Future<?> future, Duration logInterval) {
while (!await(future, logInterval)) {
log();
}
}
private static boolean await(Future<?> future, Duration duration) {
try {
future.get(duration.toNanos(), TimeUnit.NANOSECONDS);
return true;
} catch (InterruptedException | ExecutionException e) {
throw new IllegalStateException(e);
} catch (TimeoutException e) {
return false;
}
}
private static record ProgressLogger(String name, Supplier<String> fn) {
@Override

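A sketch of how the additions to this class can be used together; the counter name, byte total, and output path are illustrative:

    package com.onthegomap.flatmap;

    import com.onthegomap.flatmap.monitoring.ProgressLoggers;
    import java.nio.file.Path;
    import java.time.Duration;
    import java.util.concurrent.CompletableFuture;

    public class ProgressLoggersExample {

      public static void main(String[] args) {
        Path output = Path.of("data", "sources", "monaco.osm.pbf");
        CompletableFuture<Void> download = CompletableFuture.runAsync(() -> {
          // ... download work writes to output here ...
        });
        ProgressLoggers loggers = new ProgressLoggers("download")
          // report progress as bytes (Format::formatStorage) instead of a plain count
          .addRatePercentBytesCounter("monaco", 1_000_000, () -> FileUtils.fileSize(output));
        // re-log all counters every 10 seconds until the future completes
        loggers.awaitAndLog(download, Duration.ofSeconds(10));
      }
    }
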
View file

@ -51,6 +51,10 @@ public interface Stats extends AutoCloseable {
void dataError(String stat);
static Stats inMemory() {
return new InMemory();
}
class InMemory implements Stats {
private final Timers timers = new Timers();

View file

@ -108,6 +108,7 @@ public record Topology<T>(
for (T item : items) {
writer.accept(item);
}
queue.close();
return readFromQueue(queue);
}

View file

@ -21,7 +21,7 @@ import org.locationtech.jts.geom.Geometry;
public class FeatureCollectorTest {
private CommonParams config = CommonParams.defaults();
- private FeatureCollector.Factory factory = new FeatureCollector.Factory(config, new Stats.InMemory());
+ private FeatureCollector.Factory factory = new FeatureCollector.Factory(config, Stats.inMemory());
private static void assertFeatures(int zoom, List<Map<String, Object>> expected, FeatureCollector actual) {
List<FeatureCollector.Feature> actualList = StreamSupport.stream(actual.spliterator(), false).toList();

View file

@ -62,7 +62,7 @@ public class FlatMapTest {
private static final int Z12_TILES = 1 << 12;
private static final double Z12_WIDTH = 1d / Z12_TILES;
private static final int Z4_TILES = 1 << 4;
- private final Stats stats = new Stats.InMemory();
+ private final Stats stats = Stats.inMemory();
private static <T extends ReaderElement> T with(T elem, Consumer<T> fn) {
fn.accept(elem);
@ -99,7 +99,7 @@ public class FlatMapTest {
osmElements.stream().filter(e -> e.getType() == ReaderElement.RELATION).forEachOrdered(next);
};
var nodeMap = LongLongMap.newInMemorySortedTable();
- try (var reader = new OpenStreetMapReader(elems, nodeMap, profile, new Stats.InMemory())) {
+ try (var reader = new OpenStreetMapReader(elems, nodeMap, profile, Stats.inMemory())) {
reader.pass1(config);
reader.pass2(featureGroup, config);
}

View file

@ -17,7 +17,7 @@ public class FormatTest {
"99999,99k",
"999999,999k",
"9999999,9.9M",
- "-9999999,-9.9M",
+ "-9999999,-",
"5.5e12,5.5T",
})
public void testFormatNumeric(Double number, String formatted) {
@ -27,9 +27,9 @@ public class FormatTest {
@ParameterizedTest
@CsvSource({
"999,999",
- "1000,1kB",
- "9999,9.9kB",
- "5.5e9,5.5GB",
+ "1000,1k",
+ "9999,9.9k",
+ "5.5e9,5.5G",
})
public void testFormatStorage(Double number, String formatted) {
assertEquals(formatted, Format.formatStorage(number, false));

View file

@ -0,0 +1,49 @@
package com.onthegomap.flatmap;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.junit.jupiter.api.Test;
public class GeofabrikTest {
private static final byte[] response = """
{ "type": "FeatureCollection",
"features": [
{
"type": "Feature",
"properties": {
"id" : "afghanistan",
"parent" : "asia",
"iso3166-1:alpha2" : [ "AF" ],
"name" : "Afghanistan",
"urls" : {
"pbf" : "https://download.geofabrik.de/asia/afghanistan-latest.osm.pbf",
"bz2" : "https://download.geofabrik.de/asia/afghanistan-latest.osm.bz2",
"shp" : "https://download.geofabrik.de/asia/afghanistan-latest-free.shp.zip",
"pbf-internal" : "https://osm-internal.download.geofabrik.de/asia/afghanistan-latest-internal.osm.pbf",
"history" : "https://osm-internal.download.geofabrik.de/asia/afghanistan-internal.osh.pbf",
"taginfo" : "https://taginfo.geofabrik.de/asia/afghanistan/",
"updates" : "https://download.geofabrik.de/asia/afghanistan-updates"
}
}
}
]
}
""".getBytes(StandardCharsets.UTF_8);
@Test
public void testFound() throws IOException {
String url = Geofabrik.getDownloadUrl("afghanistan", new ByteArrayInputStream(response));
assertEquals("https://download.geofabrik.de/asia/afghanistan-latest.osm.pbf", url);
}
@Test
public void testNotFound() {
assertThrows(IllegalArgumentException.class,
() -> Geofabrik.getDownloadUrl("monaco", new ByteArrayInputStream(response)));
}
}

View file

@ -28,7 +28,7 @@ import org.locationtech.jts.geom.Geometry;
public class FeatureGroupTest {
private final FeatureSort sorter = FeatureSort.newInMemory();
- private FeatureGroup features = new FeatureGroup(sorter, new Profile.NullProfile(), new Stats.InMemory());
+ private FeatureGroup features = new FeatureGroup(sorter, new Profile.NullProfile(), Stats.inMemory());
@Test
public void testEmpty() {
@ -211,7 +211,7 @@ public class FeatureGroupTest {
Collections.reverse(items);
return items;
}
- }, new Stats.InMemory());
+ }, Stats.inMemory());
int x = 5, y = 6;
putWithGroup(
1, "layer", Map.of("id", 3), newPoint(x, y), 0, 1, 2

View file

@ -26,7 +26,7 @@ public class FeatureSortTest {
}
private FeatureSort newSorter(int workers, int chunkSizeLimit, boolean gzip) {
- return FeatureSort.newExternalMergeSort(tmpDir, workers, chunkSizeLimit, gzip, config, new Stats.InMemory());
+ return FeatureSort.newExternalMergeSort(tmpDir, workers, chunkSizeLimit, gzip, config, Stats.inMemory());
}
@Test

View file

@ -14,7 +14,7 @@ public class ProgressLoggersTest {
@Timeout(10)
public void testLogTopology() {
var latch = new CountDownLatch(1);
- var topology = Topology.start("topo", new Stats.InMemory())
+ var topology = Topology.start("topo", Stats.inMemory())
.fromGenerator("reader", next -> latch.await())
.addBuffer("reader_queue", 10)
.addWorker("worker", 2, (a, b) -> latch.await())
@ -33,6 +33,7 @@ public class ProgressLoggersTest {
log.replaceAll("[ 0-9][0-9]%", " 0%"));
latch.countDown();
topology.awaitAndLog(loggers, Duration.ofSeconds(10));
loggers.getLog();
assertEquals("[prefix]\n reader( -%) -> (0/10) -> worker( -% -%) -> (0/10) -> writer( -% -%)",
loggers.getLog());
}

View file

@ -25,12 +25,12 @@ public class NaturalEarthReaderTest {
@Timeout(30)
public void testReadNaturalEarth(String filename, @TempDir Path tempDir) {
var path = Path.of("src", "test", "resources", filename);
- try (var reader = new NaturalEarthReader("test", path, tempDir, new Profile.NullProfile(), new Stats.InMemory())) {
+ try (var reader = new NaturalEarthReader("test", path, tempDir, new Profile.NullProfile(), Stats.inMemory())) {
for (int i = 1; i <= 2; i++) {
assertEquals(19, reader.getCount(), "iter " + i);
List<Geometry> points = new ArrayList<>();
- Topology.start("test", new Stats.InMemory())
+ Topology.start("test", Stats.inMemory())
.fromGenerator("naturalearth", reader.read())
.addBuffer("reader_queue", 100, 1)
.sinkToConsumer("counter", 1, elem -> {

View file

@ -27,7 +27,7 @@ public class OpenStreetMapReaderTest {
public final OsmSource osmSource = (name, threads) -> next -> {
};
- private final Stats stats = new Stats.InMemory();
+ private final Stats stats = Stats.inMemory();
private final Profile profile = new Profile.NullProfile();
private final LongLongMap longLongMap = LongLongMap.newInMemoryHashMap();

View file

@ -27,7 +27,7 @@ public class OsmInputFileTest {
AtomicInteger nodes = new AtomicInteger(0);
AtomicInteger ways = new AtomicInteger(0);
AtomicInteger rels = new AtomicInteger(0);
- Topology.start("test", new Stats.InMemory())
+ Topology.start("test", Stats.inMemory())
.fromGenerator("pbf", file.read("test", 2))
.addBuffer("reader_queue", 1_000, 100)
.sinkToConsumer("counter", 1, elem -> {

View file

@ -22,7 +22,7 @@ public class ShapefileReaderTest {
"test",
Path.of("src", "test", "resources", "shapefile.zip"),
new Profile.NullProfile(),
- new Stats.InMemory()
+ Stats.inMemory()
);
@AfterEach
@ -41,7 +41,7 @@ public class ShapefileReaderTest {
public void testReadShapefile() {
for (int i = 1; i <= 2; i++) {
List<Geometry> points = new ArrayList<>();
- Topology.start("test", new Stats.InMemory())
+ Topology.start("test", Stats.inMemory())
.fromGenerator("shapefile", reader.read())
.addBuffer("reader_queue", 100, 1)
.sinkToConsumer("counter", 1, elem -> {

View file

@ -40,7 +40,7 @@ import org.locationtech.jts.precision.GeometryPrecisionReducer;
public class FeatureRendererTest {
private CommonParams config = CommonParams.defaults();
- private final Stats stats = new Stats.InMemory();
+ private final Stats stats = Stats.inMemory();
private FeatureCollector collector(Geometry worldGeom) {
var latLonGeom = GeoUtils.worldToLatLonCoords(worldGeom);
@ -50,7 +50,7 @@ public class FeatureRendererTest {
private Map<TileCoord, Collection<Geometry>> renderGeometry(FeatureCollector.Feature feature) {
Map<TileCoord, Collection<Geometry>> result = new TreeMap<>();
new FeatureRenderer(config, rendered -> result.computeIfAbsent(rendered.tile(), tile -> new HashSet<>())
- .add(decodeSilently(rendered.vectorTileFeature().geometry())), new Stats.InMemory()).accept(feature);
+ .add(decodeSilently(rendered.vectorTileFeature().geometry())), Stats.inMemory()).accept(feature);
result.values().forEach(gs -> gs.forEach(TestUtils::validateGeometry));
return result;
}
@ -58,7 +58,7 @@ public class FeatureRendererTest {
private Map<TileCoord, Collection<RenderedFeature>> renderFeatures(FeatureCollector.Feature feature) {
Map<TileCoord, Collection<RenderedFeature>> result = new TreeMap<>();
new FeatureRenderer(config, rendered -> result.computeIfAbsent(rendered.tile(), tile -> new HashSet<>())
- .add(rendered), new Stats.InMemory()).accept(feature);
+ .add(rendered), Stats.inMemory()).accept(feature);
result.values()
.forEach(gs -> gs.forEach(f -> TestUtils.validateGeometry(decodeSilently(f.vectorTileFeature().geometry()))));
return result;
@ -808,7 +808,7 @@ public class FeatureRendererTest {
.setZoomRange(maxZoom, maxZoom)
.setBufferPixels(0);
AtomicLong num = new AtomicLong(0);
- new FeatureRenderer(config, rendered1 -> num.incrementAndGet(), new Stats.InMemory())
+ new FeatureRenderer(config, rendered1 -> num.incrementAndGet(), Stats.inMemory())
.accept(feature);
assertEquals(num.get(), Math.pow(4, maxZoom));
}

View file

@ -14,7 +14,7 @@ import org.junit.jupiter.api.Timeout;
public class TopologyTest {
- Stats stats = new Stats.InMemory();
+ Stats stats = Stats.inMemory();
@Test
@Timeout(10)

View file

@ -104,5 +104,5 @@ public class WorkQueueTest {
return new WorkQueue<>("queue", 1000, maxBatch, stats);
}
- private static final Stats stats = new Stats.InMemory();
+ private static final Stats stats = Stats.inMemory();
}

View file

@ -21,7 +21,7 @@ import org.locationtech.jts.geom.Geometry;
public class BenchmarkMapping {
public static void main(String[] args) throws IOException {
- var profile = new OpenMapTilesProfile(Translations.nullProvider(List.of()), Arguments.of(), new Stats.InMemory());
+ var profile = new OpenMapTilesProfile(Translations.nullProvider(List.of()), Arguments.of(), Stats.inMemory());
var random = new Random(0);
var input = new OsmInputFile(Path.of("data", "sources", "north-america_us_massachusetts.pbf"));
List<SourceFeature> inputs = new ArrayList<>();

View file

@ -55,7 +55,7 @@ public class OpenMapTilesMain {
var osmInput = new OsmInputFile(
arguments.inputFile(OpenMapTilesProfile.OSM_SOURCE, "input file", sourcesDir.resolve(fallbackOsmFile)));
Wikidata
- .fetch(osmInput, wikidataNamesFile, CommonParams.from(arguments, osmInput), profile, new Stats.InMemory());
+ .fetch(osmInput, wikidataNamesFile, CommonParams.from(arguments, osmInput), profile, Stats.inMemory());
translations.addTranslationProvider(Wikidata.load(wikidataNamesFile));
System.exit(0);
}

View file

@ -18,7 +18,7 @@ public class OpenMaptilesProfileTest {
private final Translations translations = Translations.defaultProvider(List.of("en", "es", "de"))
.addTranslationProvider(wikidataTranslations);
private final OpenMapTilesProfile profile = new OpenMapTilesProfile(translations, Arguments.of(),
- new Stats.InMemory());
+ Stats.inMemory());
@Test
public void testCaresAboutWikidata() {

View file

@ -32,12 +32,12 @@ public abstract class AbstractLayerTest {
final Wikidata.WikidataTranslations wikidataTranslations = new Wikidata.WikidataTranslations();
final Translations translations = Translations.defaultProvider(List.of("en", "es", "de"))
- .addTranslationProvider(wikidataTranslations);
+ .addTranslationProvider(wikidataTranslations);
final CommonParams params = CommonParams.defaults();
final OpenMapTilesProfile profile = new OpenMapTilesProfile(translations, Arguments.of(),
- new Stats.InMemory());
- final Stats stats = new Stats.InMemory();
+ Stats.inMemory());
+ final Stats stats = Stats.inMemory();
final FeatureCollector.Factory featureCollectorFactory = new FeatureCollector.Factory(params, stats);
static void assertFeatures(int zoom, List<Map<String, Object>> expected, Iterable<FeatureCollector.Feature> actual) {
@ -58,11 +58,11 @@ public abstract class AbstractLayerTest {
VectorTileEncoder.Feature pointFeature(String layer, Map<String, Object> map, int group) {
return new VectorTileEncoder.Feature(
layer,
1,
VectorTileEncoder.encodeGeometry(newPoint(0, 0)),
new HashMap<>(map),
group
layer,
1,
VectorTileEncoder.encodeGeometry(newPoint(0, 0)),
new HashMap<>(map),
group
);
}
@ -102,31 +102,31 @@ public abstract class AbstractLayerTest {
SourceFeature pointFeature(Map<String, Object> props) {
return new ReaderFeature(
newPoint(0, 0),
new HashMap<>(props),
OSM_SOURCE,
null,
0
newPoint(0, 0),
new HashMap<>(props),
OSM_SOURCE,
null,
0
);
}
SourceFeature lineFeature(Map<String, Object> props) {
return new ReaderFeature(
newLineString(0, 0, 1, 1),
new HashMap<>(props),
OSM_SOURCE,
null,
0
newLineString(0, 0, 1, 1),
new HashMap<>(props),
OSM_SOURCE,
null,
0
);
}
SourceFeature polygonFeatureWithArea(double area, Map<String, Object> props) {
return new ReaderFeature(
GeoUtils.worldToLatLonCoords(rectangle(0, Math.sqrt(area))),
new HashMap<>(props),
OSM_SOURCE,
null,
0
GeoUtils.worldToLatLonCoords(rectangle(0, Math.sqrt(area))),
new HashMap<>(props),
OSM_SOURCE,
null,
0
);
}
@ -136,75 +136,75 @@ public abstract class AbstractLayerTest {
@NotNull
protected ReaderFeature lineFeatureWithRelation(List<OpenStreetMapReader.RelationInfo> relationInfos,
Map<String, Object> map) {
Map<String, Object> map) {
return new ReaderFeature(
newLineString(0, 0, 1, 1),
map,
OSM_SOURCE,
null,
0,
(relationInfos == null ? List.<OpenStreetMapReader.RelationInfo>of() : relationInfos).stream()
.map(r -> new OpenStreetMapReader.RelationMember<>("", r)).toList()
newLineString(0, 0, 1, 1),
map,
OSM_SOURCE,
null,
0,
(relationInfos == null ? List.<OpenStreetMapReader.RelationInfo>of() : relationInfos).stream()
.map(r -> new OpenStreetMapReader.RelationMember<>("", r)).toList()
);
}
protected void testMergesLinestrings(Map<String, Object> attrs, String layer,
double length, int zoom) throws GeometryException {
double length, int zoom) throws GeometryException {
var line1 = new VectorTileEncoder.Feature(
layer,
1,
VectorTileEncoder.encodeGeometry(newLineString(0, 0, length / 2, 0)),
attrs,
0
layer,
1,
VectorTileEncoder.encodeGeometry(newLineString(0, 0, length / 2, 0)),
attrs,
0
);
var line2 = new VectorTileEncoder.Feature(
layer,
1,
VectorTileEncoder.encodeGeometry(newLineString(length / 2, 0, length, 0)),
attrs,
0
layer,
1,
VectorTileEncoder.encodeGeometry(newLineString(length / 2, 0, length, 0)),
attrs,
0
);
var connected = new VectorTileEncoder.Feature(
layer,
1,
VectorTileEncoder.encodeGeometry(newLineString(0, 0, length, 0)),
attrs,
0
layer,
1,
VectorTileEncoder.encodeGeometry(newLineString(0, 0, length, 0)),
attrs,
0
);
assertEquals(
List.of(connected),
profile.postProcessLayerFeatures(layer, zoom, List.of(line1, line2))
List.of(connected),
profile.postProcessLayerFeatures(layer, zoom, List.of(line1, line2))
);
}
protected void testDoesNotMergeLinestrings(Map<String, Object> attrs, String layer,
double length, int zoom) throws GeometryException {
double length, int zoom) throws GeometryException {
var line1 = new VectorTileEncoder.Feature(
layer,
1,
VectorTileEncoder.encodeGeometry(newLineString(0, 0, length / 2, 0)),
attrs,
0
layer,
1,
VectorTileEncoder.encodeGeometry(newLineString(0, 0, length / 2, 0)),
attrs,
0
);
var line2 = new VectorTileEncoder.Feature(
layer,
1,
VectorTileEncoder.encodeGeometry(newLineString(length / 2, 0, length, 0)),
attrs,
0
layer,
1,
VectorTileEncoder.encodeGeometry(newLineString(length / 2, 0, length, 0)),
attrs,
0
);
var connected = new VectorTileEncoder.Feature(
layer,
1,
VectorTileEncoder.encodeGeometry(newLineString(0, 0, length, 0)),
attrs,
0
layer,
1,
VectorTileEncoder.encodeGeometry(newLineString(0, 0, length, 0)),
attrs,
0
);
assertEquals(
List.of(line1, line2),
profile.postProcessLayerFeatures(layer, zoom, List.of(line1, line2))
List.of(line1, line2),
profile.postProcessLayerFeatures(layer, zoom, List.of(line1, line2))
);
}
}

View file

@ -4,4 +4,4 @@ set -o errexit
set -o pipefail
set -o nounset
- mvn -DskipTests=true --projects openmaptiles -am clean package
+ mvn -DskipTests=true --projects flatmap-openmaptiles -am clean package