Mirror of https://github.com/onthegomap/planetiler

shapefile reader

parent 8300b937c4
commit adcb6576d9
@@ -19,4 +19,4 @@ fi
 echo "Running..."
 java -Dinput="./data/sources/${AREA}.pbf" \
   -cp "$JAR" \
-  com.onthegomap.flatmap.profiles.OpenMapTilesProfile
+  com.onthegomap.flatmap.OpenMapTilesMain
@@ -49,7 +49,7 @@ public class OpenMapTilesMain {

     LOGGER.info("Building OpenMapTiles profile into " + output + " in these phases:");
     if (fetchWikidata) {
-      LOGGER.info("- [wikidata] Fetch OpenStreetMap element name translations from wikidata");
+      LOGGER.info(" [wikidata] Fetch OpenStreetMap element name translations from wikidata");
     }
     LOGGER.info(" [lake_centerlines] Extract lake centerlines");
     LOGGER.info(" [water_polygons] Process ocean polygons");
@@ -78,12 +78,9 @@ public class OpenMapTilesMain {
     }

     stats.time("lake_centerlines", () ->
-      new ShapefileReader("EPSG:3857", centerlines, stats)
-        .process("lake_centerlines", renderer, featureMap, config));
+      ShapefileReader.process("EPSG:3857", "lake_centerlines", centerlines, renderer, featureMap, config));
     stats.time("water_polygons", () ->
-      new ShapefileReader(waterPolygons, stats)
-        .process("water_polygons", renderer, featureMap, config)
-    );
+      ShapefileReader.process("water_polygons", waterPolygons, renderer, featureMap, config));
     stats.time("natural_earth", () ->
       new NaturalEarthReader(naturalEarth, tmpDir.resolve("natearth.sqlite").toFile(), stats)
         .process("natural_earth", renderer, featureMap, config)
@@ -17,7 +17,12 @@ public class NaturalEarthReader extends Reader {
   }

   @Override
-  public SourceStep<SourceFeature> open() {
+  public SourceStep<SourceFeature> read() {
     return null;
   }
+
+  @Override
+  public void close() {
+
+  }
 }
@@ -12,14 +12,15 @@ import com.onthegomap.flatmap.monitoring.ProgressLoggers;
 import com.onthegomap.flatmap.monitoring.Stats;
 import com.onthegomap.flatmap.worker.Topology;
 import com.onthegomap.flatmap.worker.Topology.SourceStep;
+import java.io.Closeable;
 import java.util.concurrent.atomic.AtomicLong;
 import org.locationtech.jts.geom.Envelope;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

-public abstract class Reader {
+public abstract class Reader implements Closeable {

-  private final Stats stats;
+  protected final Stats stats;
   private final Logger LOGGER = LoggerFactory.getLogger(getClass());

   public Reader(Stats stats) {
@@ -35,7 +36,7 @@ public abstract class Reader {
     AtomicLong featuresWritten = new AtomicLong(0);

     var topology = Topology.start(name, stats)
-      .fromGenerator("read", open())
+      .fromGenerator("read", read())
       .addBuffer("read_queue", 1000)
       .<RenderedFeature>addWorker("process", threads, (prev, next) -> {
         RenderableFeatures features = new RenderableFeatures();
@@ -69,6 +70,8 @@ public abstract class Reader {

   public abstract long getCount();

-  public abstract SourceStep<SourceFeature> open();
+  public abstract SourceStep<SourceFeature> read();

+  @Override
+  public abstract void close();
 }
@@ -0,0 +1,18 @@
+package com.onthegomap.flatmap.reader;
+
+import com.onthegomap.flatmap.SourceFeature;
+import org.locationtech.jts.geom.Geometry;
+
+public class ReaderFeature implements SourceFeature {
+
+  private final Geometry geometry;
+
+  public ReaderFeature(Geometry geometry) {
+    this.geometry = geometry;
+  }
+
+  @Override
+  public Geometry getGeometry() {
+    return geometry;
+  }
+}
@@ -1,14 +1,104 @@
 package com.onthegomap.flatmap.reader;

+import com.onthegomap.flatmap.FeatureRenderer;
+import com.onthegomap.flatmap.FlatMapConfig;
 import com.onthegomap.flatmap.SourceFeature;
+import com.onthegomap.flatmap.collections.MergeSortFeatureMap;
 import com.onthegomap.flatmap.monitoring.Stats;
 import com.onthegomap.flatmap.worker.Topology.SourceStep;
+import java.io.Closeable;
 import java.io.File;
+import java.io.IOException;
+import java.net.URI;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipFile;
+import org.geotools.data.FeatureSource;
+import org.geotools.data.shapefile.ShapefileDataStore;
+import org.geotools.feature.FeatureCollection;
+import org.geotools.feature.FeatureIterator;
+import org.geotools.geometry.jts.JTS;
+import org.geotools.referencing.CRS;
+import org.locationtech.jts.geom.Geometry;
+import org.opengis.feature.simple.SimpleFeature;
+import org.opengis.feature.simple.SimpleFeatureType;
+import org.opengis.filter.Filter;
+import org.opengis.referencing.FactoryException;
+import org.opengis.referencing.crs.CoordinateReferenceSystem;
+import org.opengis.referencing.operation.MathTransform;

-public class ShapefileReader extends Reader {
+public class ShapefileReader extends Reader implements Closeable {

+  private final FeatureCollection<SimpleFeatureType, SimpleFeature> inputSource;
+  final FeatureIterator<SimpleFeature> featureIterator;
+  private String[] attributeNames;
+  private final ShapefileDataStore dataStore;
+  private MathTransform transform;
+
+  public static void process(String sourceProjection, String name, File input, FeatureRenderer renderer,
+    MergeSortFeatureMap writer, FlatMapConfig config) {
+    try (var reader = new ShapefileReader(sourceProjection, input, config.stats())) {
+      reader.process(name, renderer, writer, config);
+    }
+  }
+
+  public static void process(String name, File input, FeatureRenderer renderer,
+    MergeSortFeatureMap writer, FlatMapConfig config) {
+    process(null, name, input, renderer, writer, config);
+  }
+
   public ShapefileReader(String sourceProjection, File input, Stats stats) {
     super(stats);
+    dataStore = decode(input);
+    try {
+      String typeName = dataStore.getTypeNames()[0];
+      FeatureSource<SimpleFeatureType, SimpleFeature> source =
+        dataStore.getFeatureSource(typeName);

+      inputSource = source.getFeatures(Filter.INCLUDE);
+      CoordinateReferenceSystem src =
+        sourceProjection == null ? source.getSchema().getCoordinateReferenceSystem() : CRS.decode(sourceProjection);
+      CoordinateReferenceSystem dest = CRS.decode("EPSG:4326", true);
+      transform = CRS.findMathTransform(src, dest);
+      if (transform.isIdentity()) {
+        transform = null;
+      }
+      attributeNames = new String[inputSource.getSchema().getAttributeCount()];
+      for (int i = 0; i < attributeNames.length; i++) {
+        attributeNames[i] = inputSource.getSchema().getDescriptor(i).getLocalName();
+      }
+      this.featureIterator = inputSource.features();
+    } catch (IOException | FactoryException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  private ShapefileDataStore decode(File file) {
+    try {
+      final String name = file.getName();
+
+      URI uri;
+
+      if (name.endsWith(".zip")) {
+        String shapeFileInZip;
+        try (ZipFile zip = new ZipFile(file)) {
+          shapeFileInZip = zip.stream()
+            .map(ZipEntry::getName)
+            .filter(z -> z.endsWith(".shp"))
+            .findAny().orElse(null);
+        }
+        if (shapeFileInZip == null) {
+          throw new IllegalArgumentException("No .shp file found inside " + name);
+        }
+        uri = URI.create("jar:file:" + file.toPath().toAbsolutePath() + "!/" + shapeFileInZip);
+      } else if (name.endsWith(".shp")) {
+        uri = file.toURI();
+      } else {
+        throw new IllegalArgumentException("Invalid shapefile input: " + file + " must be zip or shp");
+      }
+      return new ShapefileDataStore(uri.toURL());
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
   }

   public ShapefileReader(File input, Stats stats) {
@@ -17,11 +107,34 @@ public class ShapefileReader extends Reader {

   @Override
   public long getCount() {
-    return 0;
+    return inputSource.size();
   }

   @Override
-  public SourceStep<SourceFeature> open() {
-    return null;
+  public SourceStep<SourceFeature> read() {
+    return next -> {
+      while (featureIterator.hasNext()) {
+        SimpleFeature feature = featureIterator.next();
+        Geometry source = (Geometry) feature.getDefaultGeometry();
+        Geometry transformed = source;
+        if (transform != null) {
+          transformed = JTS.transform(source, transform);
+        }
+        if (transformed != null) {
+          SourceFeature geom = new ReaderFeature(transformed);
+          // TODO
+          // for (int i = 1; i < attributeNames.length; i++) {
+          //   geom.setTag(attributeNames[i], feature.getAttribute(i));
+          // }
+          next.accept(geom);
+        }
+      }
+    };
+  }
+
+  @Override
+  public void close() {
+    featureIterator.close();
+    dataStore.dispose();
   }
 }
@@ -13,16 +13,16 @@ import org.junit.jupiter.api.Timeout;

 public class OsmInputFileTest {

-  private OsmInputFile file = new OsmInputFile(new File("src/test/resources/andorra-latest.osm.pbf"));
+  private OsmInputFile file = new OsmInputFile(new File("src/test/resources/monaco-latest.osm.pbf"));

   @Test
   public void testGetBounds() {
-    assertArrayEquals(new double[]{1.412368, 42.4276, 1.787481, 42.65717}, file.getBounds());
+    assertArrayEquals(new double[]{7.409205, 43.72335, 7.448637, 43.75169}, file.getBounds());
   }

   @Test
   @Timeout(30)
-  public void testReadAndorraTwice() {
+  public void testReadMonacoTwice() {
     for (int i = 1; i <= 2; i++) {
       AtomicInteger nodes = new AtomicInteger(0);
       AtomicInteger ways = new AtomicInteger(0);
@@ -37,9 +37,9 @@ public class OsmInputFileTest {
           case ReaderElement.RELATION -> rels.incrementAndGet();
         }
       }).await();
-      assertEquals(246_028, nodes.get(), "nodes pass " + i);
-      assertEquals(12_677, ways.get(), "ways pass " + i);
-      assertEquals(287, rels.get(), "rels pass " + i);
+      assertEquals(25_423, nodes.get(), "nodes pass " + i);
+      assertEquals(4_106, ways.get(), "ways pass " + i);
+      assertEquals(243, rels.get(), "rels pass " + i);
     }
   }
 }
@@ -0,0 +1,51 @@
+package com.onthegomap.flatmap.reader;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import com.onthegomap.flatmap.GeoUtils;
+import com.onthegomap.flatmap.monitoring.Stats.InMemory;
+import com.onthegomap.flatmap.worker.Topology;
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
+import org.locationtech.jts.geom.Geometry;
+
+public class ShapefileReaderTest {
+
+  private ShapefileReader reader = new ShapefileReader(new File("src/test/resources/shapefile.zip"), new InMemory());
+
+  @AfterEach
+  public void close() {
+    reader.close();
+  }
+
+  @Test
+  public void testCount() {
+    assertEquals(86, reader.getCount());
+  }
+
+  @Test
+  @Timeout(30)
+  public void testReadShapefile() {
+    Map<String, Integer> counts = new TreeMap<>();
+    List<Geometry> points = new ArrayList<>();
+    Topology.start("test", new InMemory())
+      .fromGenerator("shapefile", reader.read())
+      .addBuffer("reader_queue", 100, 1)
+      .sinkToConsumer("counter", 1, elem -> {
+        String type = elem.getGeometry().getGeometryType();
+        counts.put(type, counts.getOrDefault(type, 0) + 1);
+        points.add(elem.getGeometry());
+      }).await();
+    assertEquals(86, points.size());
+    var gc = GeoUtils.gf.createGeometryCollection(points.toArray(new Geometry[0]));
+    var centroid = gc.getCentroid();
+    assertEquals(-77.0297995, centroid.getX(), 5);
+    assertEquals(38.9119684, centroid.getY(), 5);
+  }
+}
Binary file not shown.
Binary file not shown.
Binary file not shown.
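For orientation only (not part of the commit): the pieces added above are intended to be used together roughly as in the sketch below. Every identifier comes from the diff itself; the shapefile.zip fixture, InMemory stats, and buffer sizes mirror ShapefileReaderTest, while the step names "example" and "print" are placeholders chosen here.

// Minimal usage sketch assuming the imports used in ShapefileReaderTest above.
// ShapefileReader implements Closeable, so try-with-resources calls close(), which releases
// the FeatureIterator and ShapefileDataStore; read() supplies the generator for a Topology.
try (var reader = new ShapefileReader(new File("src/test/resources/shapefile.zip"), new Stats.InMemory())) {
  Topology.start("example", new Stats.InMemory())
    .fromGenerator("shapefile", reader.read())
    .addBuffer("reader_queue", 100, 1)
    .sinkToConsumer("print", 1, feature ->
      System.out.println(feature.getGeometry().getGeometryType()))
    .await();
}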