Work around duplicate ways returned by the server when the download area is split into smaller chunks
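
When a large download area is split into smaller chunks, the server can return the same way once per chunk, so the merged result ends up with duplicates. This commit adds a MergeDuplicateWays command (plus GetDataRunnable and OsmReaderCustom, which tags primitives with their server id) to detect and merge those duplicates after the chunked downloads are combined. Below is a minimal sketch of how the command can be applied to a downloaded DataSet; the wrapper class and method are illustrative only, while MergeDuplicateWays and UndoRedoHandler are the real classes used in this commit.

    // Illustrative usage only: merge duplicate ways in a DataSet assembled from
    // several chunked downloads. The wrapper class/method are hypothetical.
    import org.openstreetmap.josm.data.UndoRedoHandler;
    import org.openstreetmap.josm.data.osm.DataSet;
    import org.openstreetmap.josm.plugins.mapwithai.commands.MergeDuplicateWays;

    public class DeduplicateExample {
        public static void deduplicate(DataSet downloaded) {
            // MergeDuplicateWays scans the dataset for ways that share (nearly)
            // identical nodes and merges them; adding the command to the
            // UndoRedoHandler executes it and keeps it undoable.
            UndoRedoHandler.getInstance().add(new MergeDuplicateWays(downloaded));
        }
    }

GetDataRunnable runs the same command up to five times after the chunks are merged, since a single pass may leave ways that only become mergeable once an earlier merge has been applied.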

Signed-off-by: Taylor Smock <taylor.smock@kaart.com>
pull/1/head
Taylor Smock 2019-10-23 16:28:28 -06:00
parent 858e7ba8ca
commit 809c8016a6
No known key found for this signature in database
GPG key ID: 625F6A74A3E4311A
11 changed files with 907 additions and 125 deletions

View file

@@ -72,7 +72,7 @@ assemble with java 13:
build:
stage: test
script:
- ./gradlew build generatePot generateSnapshotUpdateSite --stacktrace
- ./gradlew build generatePot generateSnapshotUpdateSite --info
artifacts:
paths:
- build
@@ -82,7 +82,8 @@ build:
test:
stage: test
script:
- ./gradlew check jacocoTestReport --stacktrace --continue
- ./gradlew check --stacktrace --continue --info
- ./gradlew jacocoTestReport
artifacts:
paths:
- build

View file

@@ -89,7 +89,13 @@ test {
jvmArgs("-javaagent:${classpath.find { it.name.contains("jmockit") }.absolutePath}")
}
useJUnitPlatform()
testLogging.exceptionFormat = 'full'
testLogging {
exceptionFormat "full"
events "passed", "skipped", "failed"
info {
showStandardStreams true
}
}
}
sourceSets {

View file

@@ -32,6 +32,7 @@ import org.openstreetmap.josm.plugins.mapwithai.backend.MapWithAIMoveAction;
import org.openstreetmap.josm.plugins.mapwithai.backend.MapWithAIObject;
import org.openstreetmap.josm.plugins.mapwithai.backend.MapWithAIRemoteControl;
import org.openstreetmap.josm.plugins.mapwithai.backend.MapWithAIUploadHook;
import org.openstreetmap.josm.plugins.mapwithai.backend.MergeDuplicateWaysAction;
import org.openstreetmap.josm.tools.Destroyable;
import org.openstreetmap.josm.tools.Logging;
@@ -49,6 +50,7 @@ public final class MapWithAIPlugin extends Plugin implements Destroyable {
MENU_ENTRIES.put(MapWithAIAction.class, false);
MENU_ENTRIES.put(MapWithAIArbitraryAction.class, true);
MENU_ENTRIES.put(MapWithAIMoveAction.class, false);
MENU_ENTRIES.put(MergeDuplicateWaysAction.class, true);
}
public MapWithAIPlugin(PluginInformation info) {
@@ -83,7 +85,6 @@ public final class MapWithAIPlugin extends Plugin implements Destroyable {
destroyables = new ArrayList<>();
destroyables.add(new MapWithAIUploadHook(info));
mapFrameInitialized(null, MainApplication.getMap());
}
@Override

View file

@@ -0,0 +1,209 @@
// License: GPL. For details, see LICENSE file.
package org.openstreetmap.josm.plugins.mapwithai.backend;
import static org.openstreetmap.josm.tools.I18n.tr;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.RecursiveTask;
import java.util.stream.Collectors;
import org.openstreetmap.josm.actions.MergeNodesAction;
import org.openstreetmap.josm.command.Command;
import org.openstreetmap.josm.command.DeleteCommand;
import org.openstreetmap.josm.data.osm.BBox;
import org.openstreetmap.josm.data.osm.DataSet;
import org.openstreetmap.josm.data.osm.Node;
import org.openstreetmap.josm.data.osm.OsmPrimitive;
import org.openstreetmap.josm.data.osm.UploadPolicy;
import org.openstreetmap.josm.gui.progress.NullProgressMonitor;
import org.openstreetmap.josm.gui.progress.ProgressMonitor;
import org.openstreetmap.josm.io.IllegalDataException;
import org.openstreetmap.josm.plugins.mapwithai.MapWithAIPlugin;
import org.openstreetmap.josm.plugins.mapwithai.commands.MergeDuplicateWays;
import org.openstreetmap.josm.tools.HttpClient;
import org.openstreetmap.josm.tools.HttpClient.Response;
import org.openstreetmap.josm.tools.Logging;
/**
* Get data in a parallel manner
*
* @author Taylor Smock
*/
public class GetDataRunnable extends RecursiveTask<DataSet> {
private static final long serialVersionUID = 258423685658089715L;
private final transient List<BBox> bbox;
private final transient DataSet dataSet;
private final transient ProgressMonitor monitor;
private static final Object LOCK = new Object();
/**
* @param bbox The initial bbox to get data from (don't reduce beforehand --
* it will be reduced here)
* @param dataSet The dataset to add the data to
* @param monitor A monitor to keep track of progress
*/
public GetDataRunnable(BBox bbox, DataSet dataSet, ProgressMonitor monitor) {
this(Arrays.asList(bbox), dataSet, monitor);
}
/**
* @param bbox The initial bboxes to get data from (don't reduce beforehand
* -- it will be reduced here)
* @param dataSet The dataset to add the data to
* @param monitor A monitor to keep track of progress
*/
public GetDataRunnable(List<BBox> bbox, DataSet dataSet, ProgressMonitor monitor) {
super();
this.bbox = new ArrayList<>(bbox);
this.dataSet = dataSet;
if (monitor == null) {
monitor = NullProgressMonitor.INSTANCE;
}
this.monitor = monitor;
}
@Override
public DataSet compute() {
final List<BBox> bboxes = MapWithAIDataUtils.reduceBBoxSize(bbox);
monitor.beginTask(tr("Downloading {0} data", MapWithAIPlugin.NAME), bboxes.size() - 1);
if (bboxes.size() == 1) {
final DataSet temporaryDataSet = getDataReal(bboxes.get(0));
synchronized (GetDataRunnable.class) {
dataSet.mergeFrom(temporaryDataSet);
}
} else {
final Collection<GetDataRunnable> tasks = bboxes.parallelStream()
.map(tBbox -> new GetDataRunnable(tBbox, dataSet, monitor.createSubTaskMonitor(1, true)))
.collect(Collectors.toList());
tasks.forEach(GetDataRunnable::fork);
tasks.forEach(GetDataRunnable::join);
}
monitor.finishTask();
synchronized (LOCK) {
/* Microsoft buildings don't have a source, so we add one */
MapWithAIDataUtils.addSourceTags(dataSet, "building", "Microsoft");
removeCommonTags(dataSet);
mergeNodes(dataSet);
// filterDataSet(dataSet);
cleanupDataSet(dataSet);
for (int i = 0; i < 5; i++) {
new MergeDuplicateWays(dataSet).executeCommand();
}
}
return dataSet;
}
private static void cleanupDataSet(DataSet dataSet) {
Map<OsmPrimitive, String> origIds = dataSet.allPrimitives().parallelStream()
.filter(prim -> prim.hasKey("orig_id")).distinct()
.collect(Collectors.toMap(prim -> prim, prim -> prim.get("orig_id")));
Map<OsmPrimitive, String> serverIds = dataSet.allPrimitives().parallelStream()
.filter(prim -> prim.hasKey("server_id")).distinct()
.collect(Collectors.toMap(prim -> prim, prim -> prim.get("server_id")));
List<OsmPrimitive> toDelete = origIds.entrySet().parallelStream()
.filter(entry -> serverIds.containsValue(entry.getValue())).map(Entry::getKey)
.collect(Collectors.toList());
if (!toDelete.isEmpty()) {
new DeleteCommand(toDelete).executeCommand();
}
origIds = origIds.entrySet().parallelStream().filter(entry -> !toDelete.contains(entry.getKey()))
.collect(Collectors.toMap(Entry::getKey, Entry::getValue));
serverIds.forEach((prim, str) -> prim.remove("server_id"));
origIds.forEach((prim, str) -> prim.remove("orig_id"));
}
/**
* Remove common tags from the dataset
*
* @param dataSet The dataset to remove tags from
*/
public static void removeCommonTags(DataSet dataSet) {
dataSet.allPrimitives().parallelStream().filter(prim -> prim.hasKey(MergeDuplicateWays.ORIG_ID))
.forEach(prim -> prim.remove(MergeDuplicateWays.ORIG_ID));
dataSet.getNodes().parallelStream().forEach(node -> node.remove("server_id"));
List<Node> emptyNodes = dataSet.getNodes().parallelStream().distinct().filter(node -> !node.isDeleted())
.filter(node -> node.getReferrers().isEmpty() && !node.hasKeys()).collect(Collectors.toList());
if (!emptyNodes.isEmpty()) {
new DeleteCommand(emptyNodes).executeCommand();
}
}
private static void mergeNodes(DataSet dataSet) {
List<Node> nodes = dataSet.getNodes().parallelStream().filter(node -> !node.isDeleted())
.collect(Collectors.toList());
for (int i = 0; i < nodes.size(); i++) {
Node n1 = nodes.get(i);
BBox bbox = new BBox();
bbox.addPrimitive(n1, 0.001);
List<Node> nearbyNodes = dataSet.searchNodes(bbox).parallelStream()
.filter(node -> !node.isDeleted() && node != n1
&& n1.getCoor().greatCircleDistance(node.getCoor()) < MapWithAIPreferenceHelper
.getMaxNodeDistance())
.collect(Collectors.toList());
Command mergeCommand = MergeNodesAction.mergeNodes(nearbyNodes, n1);
if (mergeCommand != null) {
mergeCommand.executeCommand();
nodes.removeAll(nearbyNodes);
}
}
}
/**
* Actually get the data
*
* @param bbox The bbox to get the data from
* @return A dataset with the data from the bbox
*/
private static DataSet getDataReal(BBox bbox) {
InputStream inputStream = null;
final DataSet dataSet = new DataSet();
String urlString = MapWithAIPreferenceHelper.getMapWithAIUrl();
if (DetectTaskingManagerUtils.hasTaskingManagerLayer()) {
urlString += "&crop_bbox={crop_bbox}";
}
dataSet.setUploadPolicy(UploadPolicy.DISCOURAGED);
try {
final URL url = new URL(urlString.replace("{bbox}", bbox.toStringCSV(",")).replace("{crop_bbox}",
DetectTaskingManagerUtils.getTaskingManagerBBox().toStringCSV(",")));
final HttpClient client = HttpClient.create(url);
final StringBuilder defaultUserAgent = new StringBuilder();
defaultUserAgent.append(client.getHeaders().get("User-Agent"));
if (defaultUserAgent.toString().trim().length() == 0) {
defaultUserAgent.append("JOSM");
}
defaultUserAgent.append(tr("/ {0} {1}", MapWithAIPlugin.NAME, MapWithAIPlugin.getVersionInfo()));
client.setHeader("User-Agent", defaultUserAgent.toString());
Logging.debug("{0}: Getting {1}", MapWithAIPlugin.NAME, client.getURL().toString());
final Response response = client.connect();
inputStream = response.getContent();
final DataSet mergeData = OsmReaderCustom.parseDataSet(inputStream, null, true);
dataSet.mergeFrom(mergeData);
response.disconnect();
} catch (UnsupportedOperationException | IllegalDataException | IOException e) {
Logging.debug(e);
} finally {
if (inputStream != null) {
try {
inputStream.close();
} catch (final IOException e) {
Logging.debug(e);
}
}
dataSet.setUploadPolicy(UploadPolicy.BLOCKED);
}
return dataSet;
}
}

View file

@@ -1,13 +1,6 @@
// License: GPL. For details, see LICENSE file.
package org.openstreetmap.josm.plugins.mapwithai.backend;
import static org.openstreetmap.josm.tools.I18n.tr;
import java.awt.EventQueue;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.InvocationTargetException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -15,12 +8,9 @@ import java.util.List;
import java.util.Objects;
import java.util.TreeSet;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;
import java.util.concurrent.locks.Lock;
import java.util.stream.Collectors;
import javax.swing.SwingUtilities;
import org.openstreetmap.josm.data.Bounds;
import org.openstreetmap.josm.data.UndoRedoHandler;
import org.openstreetmap.josm.data.coor.LatLon;
@@ -29,7 +19,6 @@ import org.openstreetmap.josm.data.osm.DataSet;
import org.openstreetmap.josm.data.osm.Node;
import org.openstreetmap.josm.data.osm.OsmPrimitive;
import org.openstreetmap.josm.data.osm.Relation;
import org.openstreetmap.josm.data.osm.UploadPolicy;
import org.openstreetmap.josm.data.osm.Way;
import org.openstreetmap.josm.data.preferences.sources.ExtendedSourceEntry;
import org.openstreetmap.josm.data.preferences.sources.MapPaintPrefHelper;
@@ -38,13 +27,8 @@ import org.openstreetmap.josm.data.preferences.sources.SourceType;
import org.openstreetmap.josm.gui.MainApplication;
import org.openstreetmap.josm.gui.layer.OsmDataLayer;
import org.openstreetmap.josm.gui.progress.swing.PleaseWaitProgressMonitor;
import org.openstreetmap.josm.io.IllegalDataException;
import org.openstreetmap.josm.io.OsmReader;
import org.openstreetmap.josm.plugins.mapwithai.MapWithAIPlugin;
import org.openstreetmap.josm.plugins.mapwithai.commands.MapWithAIAddCommand;
import org.openstreetmap.josm.tools.HttpClient;
import org.openstreetmap.josm.tools.HttpClient.Response;
import org.openstreetmap.josm.tools.Logging;
import org.openstreetmap.josm.tools.Utils;
/**
@@ -56,90 +40,6 @@ public final class MapWithAIDataUtils {
private static ForkJoinPool forkJoinPool;
static final Object LAYER_LOCK = new Object();
private static class GetDataRunnable extends RecursiveTask<DataSet> {
private static final long serialVersionUID = 258423685658089715L;
private final transient List<BBox> bbox;
private final transient DataSet dataSet;
private final transient PleaseWaitProgressMonitor monitor;
public GetDataRunnable(BBox bbox, DataSet dataSet, PleaseWaitProgressMonitor monitor) {
this(Arrays.asList(bbox), dataSet, monitor);
}
public GetDataRunnable(List<BBox> bbox, DataSet dataSet, PleaseWaitProgressMonitor monitor) {
super();
this.bbox = new ArrayList<>(bbox);
this.dataSet = dataSet;
this.monitor = monitor;
}
private static DataSet getDataReal(BBox bbox) {
InputStream inputStream = null;
final DataSet dataSet = new DataSet();
String urlString = MapWithAIPreferenceHelper.getMapWithAIUrl();
if (DetectTaskingManagerUtils.hasTaskingManagerLayer()) {
urlString += "&crop_bbox={crop_bbox}";
}
dataSet.setUploadPolicy(UploadPolicy.DISCOURAGED);
try {
final URL url = new URL(urlString.replace("{bbox}", bbox.toStringCSV(",")).replace("{crop_bbox}",
DetectTaskingManagerUtils.getTaskingManagerBBox().toStringCSV(",")));
final HttpClient client = HttpClient.create(url);
final StringBuilder defaultUserAgent = new StringBuilder();
defaultUserAgent.append(client.getHeaders().get("User-Agent"));
if (defaultUserAgent.toString().trim().length() == 0) {
defaultUserAgent.append("JOSM");
}
defaultUserAgent.append(tr("/ {0} {1}", MapWithAIPlugin.NAME, MapWithAIPlugin.getVersionInfo()));
client.setHeader("User-Agent", defaultUserAgent.toString());
Logging.debug("{0}: Getting {1}", MapWithAIPlugin.NAME, client.getURL().toString());
final Response response = client.connect();
inputStream = response.getContent();
final DataSet mergeData = OsmReader.parseDataSet(inputStream, null);
dataSet.mergeFrom(mergeData);
response.disconnect();
} catch (UnsupportedOperationException | IllegalDataException | IOException e) {
Logging.debug(e);
} finally {
if (inputStream != null) {
try {
inputStream.close();
} catch (final IOException e) {
Logging.debug(e);
}
}
dataSet.setUploadPolicy(UploadPolicy.BLOCKED);
}
return dataSet;
}
@Override
public DataSet compute() {
final List<BBox> bboxes = reduceBBoxSize(bbox);
if (bboxes.size() == 1) {
final DataSet temporaryDataSet = getDataReal(bboxes.get(0));
synchronized (MapWithAIDataUtils.GetDataRunnable.class) {
dataSet.mergeFrom(temporaryDataSet);
}
} else {
final Collection<GetDataRunnable> tasks = bboxes.parallelStream()
.map(tBbox -> new GetDataRunnable(tBbox, dataSet, null)).collect(Collectors.toList());
tasks.forEach(GetDataRunnable::fork);
tasks.forEach(GetDataRunnable::join);
}
if (Objects.nonNull(monitor)) {
monitor.finishTask();
monitor.close();
}
/* Microsoft buildings don't have a source, so we add one */
MapWithAIDataUtils.addSourceTags(dataSet, "building", "Microsoft");
return dataSet;
}
}
private MapWithAIDataUtils() {
// Hide the constructor
}
@@ -214,31 +114,16 @@
*/
public static DataSet getData(List<BBox> bbox) {
final DataSet dataSet = new DataSet();
final List<BBox> realBBoxes = bbox.stream().filter(BBox::isValid).collect(Collectors.toList());
final List<BBox> realBBoxes = bbox.stream().filter(BBox::isValid).distinct().collect(Collectors.toList());
final PleaseWaitProgressMonitor monitor = new PleaseWaitProgressMonitor();
if (SwingUtilities.isEventDispatchThread()) {
try {
EventQueue.invokeAndWait(() -> startMonitor(monitor));
} catch (InvocationTargetException e) {
Logging.debug(e);
} catch (InterruptedException e) {
Logging.debug(e);
Thread.currentThread().interrupt();
}
} else {
startMonitor(monitor);
}
monitor.setCancelable(Boolean.FALSE);
getForkJoinPool().invoke(new GetDataRunnable(realBBoxes, dataSet, monitor));
monitor.finishTask();
monitor.close();
return dataSet;
}
private static void startMonitor(PleaseWaitProgressMonitor monitor) {
monitor.setCancelable(Boolean.FALSE);
monitor.beginTask(tr("Downloading {0} data", MapWithAIPlugin.NAME));
monitor.indeterminateSubTask(null);
}
/**
* @return The {@link ForkJoinPool} for MapWithAI use.
*/
@ -288,7 +173,8 @@ public final class MapWithAIDataUtils {
* @param layer The {@link MapWithAILayer} to add data to
*/
public static void getMapWithAIData(MapWithAILayer layer) {
final List<OsmDataLayer> osmLayers = MainApplication.getLayerManager().getLayersOfType(OsmDataLayer.class);
final List<OsmDataLayer> osmLayers = MainApplication.getLayerManager().getLayersOfType(OsmDataLayer.class)
.stream().filter(obj -> !MapWithAILayer.class.isInstance(obj)).collect(Collectors.toList());
for (final OsmDataLayer osmLayer : osmLayers) {
if (!osmLayer.isLocked()) {
getMapWithAIData(layer, osmLayer);

View file

@@ -182,4 +182,8 @@ public final class MapWithAIPreferenceHelper {
layer.setSwitchLayers(selected);
}
}
public static double getMaxNodeDistance() {
return Config.getPref().getDouble(MapWithAIPlugin.NAME.concat(".duplicatenodedistance"), 0.6);
}
}

View file

@@ -0,0 +1,58 @@
// License: GPL. For details, see LICENSE file.
package org.openstreetmap.josm.plugins.mapwithai.backend;
import static org.openstreetmap.josm.tools.I18n.tr;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
import java.util.ArrayList;
import java.util.List;
import org.openstreetmap.josm.actions.JosmAction;
import org.openstreetmap.josm.command.Command;
import org.openstreetmap.josm.data.UndoRedoHandler;
import org.openstreetmap.josm.data.osm.Way;
import org.openstreetmap.josm.gui.MainApplication;
import org.openstreetmap.josm.plugins.mapwithai.MapWithAIPlugin;
import org.openstreetmap.josm.plugins.mapwithai.commands.MergeDuplicateWays;
import org.openstreetmap.josm.tools.Logging;
import org.openstreetmap.josm.tools.Shortcut;
/**
* @author Taylor Smock
*/
public class MergeDuplicateWaysAction extends JosmAction {
private static final long serialVersionUID = 8971004636405132635L;
private static final String DESCRIPTION = "Attempt to merge potential duplicate ways";
public MergeDuplicateWaysAction() {
super(tr("{0}: ".concat(DESCRIPTION), MapWithAIPlugin.NAME), null, tr(DESCRIPTION),
Shortcut.registerShortcut("data:attemptmergeway",
tr(DESCRIPTION), KeyEvent.VK_EXCLAMATION_MARK,
Shortcut.ALT_CTRL_SHIFT),
true);
}
@Override
public void actionPerformed(ActionEvent e) {
List<Way> ways = new ArrayList<>(MainApplication.getLayerManager().getActiveDataSet().getSelectedWays());
Command command = null;
int i = 0;
do {
if (ways.size() == 2) {
command = new MergeDuplicateWays(ways.get(0), ways.get(1));
} else if (ways.size() == 1) {
command = new MergeDuplicateWays(ways.get(0));
} else if (ways.isEmpty()) {
command = new MergeDuplicateWays(MainApplication.getLayerManager().getActiveDataSet());
}
if (command != null) {
UndoRedoHandler.getInstance().add(command);
i++;
Logging.error(Integer.toString(i));
}
} while (command != null && i < 10);
}
}

View file

@@ -0,0 +1,60 @@
// License: GPL. For details, see LICENSE file.
package org.openstreetmap.josm.plugins.mapwithai.backend;
import java.io.InputStream;
import org.openstreetmap.josm.data.osm.DataSet;
import org.openstreetmap.josm.data.osm.OsmPrimitive;
import org.openstreetmap.josm.data.osm.PrimitiveData;
import org.openstreetmap.josm.gui.progress.NullProgressMonitor;
import org.openstreetmap.josm.gui.progress.ProgressMonitor;
import org.openstreetmap.josm.io.IllegalDataException;
import org.openstreetmap.josm.io.OsmReader;
import org.openstreetmap.josm.plugins.mapwithai.MapWithAIPlugin;
import org.openstreetmap.josm.tools.Logging;
/**
* TODO remove this class in January 2020 (if the required patch is pulled into JOSM)
* Parser for the OSM API (XML output). Reads from an input stream and constructs a dataset out of it.
*
* For each XML element, there is a dedicated method.
* The XMLStreamReader cursor points to the start of the element when the method is
* entered, and it must point to the end of the same element when it is exited.
*/
public class OsmReaderCustom extends OsmReader {
protected OsmReaderCustom(boolean convertUnknownToTags) {
// Restricts visibility
try {
this.convertUnknownToTags = convertUnknownToTags;
} catch (Exception e) {
Logging.error("{0}: For best results, use JOSM >r15470", MapWithAIPlugin.NAME);
}
}
@Override
protected OsmPrimitive buildPrimitive(PrimitiveData pd) {
Long serverId = pd.getUniqueId();
OsmPrimitive p = super.buildPrimitive(pd);
p.put("server_id", Long.toString(serverId));
return p;
}
/**
* Parse the given input source and return the dataset.
*
* @param source the source input stream. Must not be null.
* @param progressMonitor the progress monitor. If null, {@link NullProgressMonitor#INSTANCE} is assumed
* @param convertUnknownToTags true if unknown xml attributes should be kept as tags
*
* @return the dataset with the parsed data
* @throws IllegalDataException if an error was found while parsing the data from the source
* @throws IllegalArgumentException if source is null
* @since xxx
*/
public static DataSet parseDataSet(InputStream source, ProgressMonitor progressMonitor,
boolean convertUnknownToTags)
throws IllegalDataException {
return new OsmReaderCustom(convertUnknownToTags).doParseDataSet(source, progressMonitor);
}
}

View file

@@ -0,0 +1,329 @@
package org.openstreetmap.josm.plugins.mapwithai.commands;
import static org.openstreetmap.josm.tools.I18n.tr;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import org.openstreetmap.josm.command.ChangeCommand;
import org.openstreetmap.josm.command.Command;
import org.openstreetmap.josm.command.DeleteCommand;
import org.openstreetmap.josm.command.SequenceCommand;
import org.openstreetmap.josm.data.osm.DataSet;
import org.openstreetmap.josm.data.osm.Node;
import org.openstreetmap.josm.data.osm.OsmPrimitive;
import org.openstreetmap.josm.data.osm.Way;
import org.openstreetmap.josm.plugins.mapwithai.MapWithAIPlugin;
import org.openstreetmap.josm.spi.preferences.Config;
import org.openstreetmap.josm.tools.Logging;
import org.openstreetmap.josm.tools.Pair;
public class MergeDuplicateWays extends Command {
public static final String ORIG_ID = "orig_id";
private Way way1;
private Way way2;
private List<Command> commands;
public MergeDuplicateWays(DataSet data) {
this(data, null, null);
}
public MergeDuplicateWays(DataSet data, Way way1) {
this(data, way1, null);
}
public MergeDuplicateWays(Way way1) {
this(way1.getDataSet(), way1, null);
}
public MergeDuplicateWays(Way way1, Way way2) {
this(way1.getDataSet(), way1, way2);
}
public MergeDuplicateWays(DataSet data, Way way1, Way way2) {
super(data);
this.way1 = way1;
this.way2 = way2;
commands = new ArrayList<>();
}
@Override
public boolean executeCommand() {
if (commands.isEmpty()) {
if (way1 == null && way2 == null) {
filterDataSet(getAffectedDataSet(), commands);
} else if (way1 != null && way2 == null) {
checkForDuplicateWays(way1, commands);
} else {
Command command = checkForDuplicateWays(way1, way2);
if (command != null) {
commands.add(command);
command.executeCommand();
}
}
} else {
for (Command command : commands) {
command.executeCommand();
}
}
return true;
}
@Override
public void undoCommand() {
for (Command tCommand : commands) {
tCommand.undoCommand();
}
}
public static void filterDataSet(DataSet dataSet, List<Command> commands) {
final List<Way> ways = new ArrayList<>(
dataSet.getWays().parallelStream().filter(prim -> !prim.isIncomplete()).collect(Collectors.toList()));
for (int i = 0; i < ways.size(); i++) {
Way way1 = ways.get(i);
Collection<Way> nearbyWays = dataSet.searchWays(way1.getBBox()).parallelStream()
.filter(way -> !way.isDeleted()).collect(Collectors.toList());
nearbyWays.remove(way1);
for (Way way2 : nearbyWays) {
Command command = checkForDuplicateWays(way1, way2);
Collection<OsmPrimitive> deletedWays = new ArrayList<>();
if (command != null) {
commands.add(command);
command.executeCommand();
command.fillModifiedData(new ArrayList<>(), deletedWays, new ArrayList<>());
if (!deletedWays.contains(way1) && !deletedWays.contains(way2)) {
commands.add(command);
}
ways.remove(way2);
}
}
}
}
/**
* Check for ways that are (partial) duplicates of the given way, and if so merge them
*
* @param way A way to check
* @param commands A list to which any executed merge commands are added
*/
public static void checkForDuplicateWays(Way way, List<Command> commands) {
Collection<Way> nearbyWays = way.getDataSet().searchWays(way.getBBox());
nearbyWays.remove(way);
for (Way way2 : nearbyWays) {
if (!way2.isDeleted()) {
Command tCommand = checkForDuplicateWays(way, way2);
if (tCommand != null) {
commands.add(tCommand);
tCommand.executeCommand();
}
}
}
}
/**
* Check if ways are (partial) duplicates, and if so create a command to merge
* them
*
* @param way1 A way to check
* @param way2 A way to check
* @return non-null command if they are duplicate ways
*/
public static Command checkForDuplicateWays(Way way1, Way way2) {
Command returnCommand = null;
final Map<Pair<Integer, Node>, Map<Integer, Node>> duplicateNodes = getDuplicateNodes(way1, way2);
Set<Entry<Pair<Integer, Node>, Map<Integer, Node>>> duplicateEntrySet = duplicateNodes.entrySet();
Set<Pair<Pair<Integer, Node>, Pair<Integer, Node>>> compressed = duplicateNodes.entrySet().stream()
.map(entry -> new Pair<Pair<Integer, Node>, Pair<Integer, Node>>(entry.getKey(),
new Pair<Integer, Node>(entry.getValue().entrySet().iterator().next().getKey(),
entry.getValue().entrySet().iterator().next().getValue())))
.sorted((pair1, pair2) -> pair1.a.a - pair2.a.a).collect(Collectors.toSet());
if (compressed.parallelStream().anyMatch(entry -> entry.a.b.isDeleted() || entry.b.b.isDeleted())) {
Logging.error("Bad node");
}
if (compressed.size() > 1
&& duplicateEntrySet.parallelStream().noneMatch(entry -> entry.getValue().size() > 1)) {
List<Integer> initial = compressed.stream().map(entry -> entry.a.a).sorted().collect(Collectors.toList());
List<Integer> after = compressed.stream().map(entry -> entry.b.a).sorted().collect(Collectors.toList());
if (sorted(initial) && sorted(after)) {
returnCommand = mergeWays(way1, way2, compressed);
}
} else if (compressed.isEmpty() && way1.hasKey(ORIG_ID) && way1.get(ORIG_ID).equals(way2.get(ORIG_ID))) {
returnCommand = mergeWays(way1, way2, compressed);
}
return returnCommand;
}
/**
* Merge ways with multiple common nodes
*
* @param way1 The way to keep
* @param way2 The way to remove while moving its nodes to way1
* @param compressed The duplicate nodes
* @return A command to merge ways, null if not possible
*/
public static Command mergeWays(Way way1, Way way2,
Set<Pair<Pair<Integer, Node>, Pair<Integer, Node>>> compressed) {
Command command = null;
if (compressed.size() > 1 || (way1.hasKey(ORIG_ID) && way1.get(ORIG_ID).equals(way2.get(ORIG_ID)))) {
Set<Pair<Pair<Integer, Node>, Pair<Integer, Node>>> realSet = new LinkedHashSet<>(compressed);
boolean sameDirection = checkDirection(realSet);
List<Node> way2Nodes = way2.getNodes();
if (!sameDirection) {
Collections.reverse(way2Nodes);
realSet = realSet.stream().map(pair -> {
pair.b.a = way2Nodes.size() - pair.b.a - 1;
return pair;
}).collect(Collectors.toSet());
}
int last = realSet.stream().mapToInt(pair -> pair.b.a).max().orElse(way2Nodes.size());
int first = realSet.stream().mapToInt(pair -> pair.b.a).min().orElse(0);
List<Node> before = new ArrayList<>();
List<Node> after = new ArrayList<>();
for (Node node : way2Nodes) {
int position = way2Nodes.indexOf(node);
if (position < first) {
before.add(node);
} else if (position > last) {
after.add(node);
}
}
Collections.reverse(before);
Way newWay = new Way(way1);
List<Command> commands = new ArrayList<>();
before.forEach(node -> newWay.addNode(0, node));
after.forEach(newWay::addNode);
if (newWay.getNodesCount() > 0) {
commands.add(new DeleteCommand(way2));
commands.add(new ChangeCommand(way1, newWay));
}
if (commands.contains(null)) {
commands = commands.stream().filter(Objects::nonNull).collect(Collectors.toList());
}
if (!commands.isEmpty()) {
command = new SequenceCommand(tr("Merge ways"), commands);
}
}
return command;
}
/**
* Find a node's duplicate in a set of duplicates
*
* @param node The node to find in the set
* @param compressed The set of node duplicates
* @return The node that the param {@code node} duplicates
*/
public static Node nodeInCompressed(Node node, Set<Pair<Pair<Integer, Node>, Pair<Integer, Node>>> compressed) {
Node returnNode = node;
for (Pair<Pair<Integer, Node>, Pair<Integer, Node>> pair : compressed) {
if (node.equals(pair.a.b)) {
returnNode = pair.b.b;
} else if (node.equals(pair.b.b)) {
returnNode = pair.a.b;
}
if (!node.equals(returnNode)) {
break;
}
}
final Node tReturnNode = returnNode;
node.getKeys().forEach(tReturnNode::put);
return returnNode;
}
/**
* Check if the node pairs increment in the same direction (only the first
* two pairs are checked); ensure that they are sorted with {@link #sorted(List)} first
*
* @param compressed The set of duplicate node/placement pairs
* @return true if the node pairs increment in the same direction
*/
public static boolean checkDirection(Set<Pair<Pair<Integer, Node>, Pair<Integer, Node>>> compressed) {
Iterator<Pair<Pair<Integer, Node>, Pair<Integer, Node>>> iterator = compressed.iterator();
boolean returnValue = false;
if (compressed.size() > 1) {
Pair<Pair<Integer, Node>, Pair<Integer, Node>> first = iterator.next();
Pair<Pair<Integer, Node>, Pair<Integer, Node>> second = iterator.next();
boolean way1Forward = first.a.a < second.a.a;
boolean way2Forward = first.b.a < second.b.a;
returnValue = way1Forward == way2Forward;
} else if (compressed.size() == 1) {
Pair<Pair<Integer, Node>, Pair<Integer, Node>> first = iterator.next();
returnValue = (first.a.a == 0 && first.b.a != 0) || (first.a.a != 0 && first.b.a == 0);
}
return returnValue;
}
/**
* Check if a list of integers increases consecutively (each entry is exactly one greater than the previous)
*
* @param collection The list of integers
* @return true if there are no gaps and it increases
*/
public static boolean sorted(List<Integer> collection) {
boolean returnValue = true;
if (collection.size() > 1) {
Integer last = collection.get(0);
for (int i = 1; i < collection.size(); i++) {
final Integer next = collection.get(i);
if (next - last != 1) {
returnValue = false;
break;
}
last = next;
}
}
return returnValue;
}
/**
* Get duplicate nodes from two ways
*
* @param way1 An initial way with nodes
* @param way2 A way that may have duplicate nodes with way1
* @return A map from each (index, node) pair in way1 to its duplicate (index, node) entries in way2
*/
public static Map<Pair<Integer, Node>, Map<Integer, Node>> getDuplicateNodes(Way way1, Way way2) {
final Map<Pair<Integer, Node>, Map<Integer, Node>> duplicateNodes = new LinkedHashMap<>();
for (int j = 0; j < way1.getNodesCount(); j++) {
final Node origNode = way1.getNode(j);
for (int k = 0; k < way2.getNodesCount(); k++) {
final Node possDupeNode = way2.getNode(k);
if (origNode.equals(possDupeNode)
|| origNode.getCoor().greatCircleDistance(possDupeNode.getCoor()) < Config.getPref()
.getDouble(MapWithAIPlugin.NAME.concat(".duplicatenodedistance"), 0.5)) {
final Pair<Integer, Node> origNodePair = new Pair<>(j, origNode);
final Map<Integer, Node> dupeNodeMap = duplicateNodes.getOrDefault(origNodePair, new HashMap<>());
dupeNodeMap.put(k, possDupeNode);
duplicateNodes.put(origNodePair, dupeNodeMap);
}
}
}
return duplicateNodes;
}
@Override
public String getDescriptionText() {
return tr("Merge ways");
}
@Override
public void fillModifiedData(Collection<OsmPrimitive> modified, Collection<OsmPrimitive> deleted,
Collection<OsmPrimitive> added) {
for (Command command : commands) {
command.fillModifiedData(modified, deleted, added);
}
}
}

View file

@@ -59,7 +59,7 @@ public class MapWithAIPluginTest {
final int originalPaintStyles = MapPaintPrefHelper.INSTANCE.get().size();
final int dataMenuSize = dataMenu.getMenuComponentCount();
plugin = new MapWithAIPlugin(info);
Assert.assertEquals(dataMenuSize + 3, dataMenu.getMenuComponentCount());
Assert.assertEquals(dataMenuSize + 4, dataMenu.getMenuComponentCount());
Assert.assertEquals(originalPaintStyles + 1, MapPaintPrefHelper.INSTANCE.get().size());
}

View file

@@ -0,0 +1,228 @@
// License: GPL. For details, see LICENSE file.
package org.openstreetmap.josm.plugins.mapwithai.backend;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.openstreetmap.josm.TestUtils;
import org.openstreetmap.josm.data.coor.LatLon;
import org.openstreetmap.josm.data.osm.DataSet;
import org.openstreetmap.josm.data.osm.Node;
import org.openstreetmap.josm.data.osm.Way;
import org.openstreetmap.josm.plugins.mapwithai.commands.MergeDuplicateWays;
import org.openstreetmap.josm.testutils.JOSMTestRules;
import org.openstreetmap.josm.tools.Pair;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
/**
* @author Taylor Smock
*/
public class MergeDuplicateWaysTest {
@Rule
@SuppressFBWarnings("URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD")
public JOSMTestRules test = new JOSMTestRules().projection();
/**
* Test method for {@link GetDataRunnable#removeCommonTags(DataSet)}.
*/
@Test
public void testRemoveCommonTags() {
DataSet ds1 = new DataSet(TestUtils.newNode("orig_id=2222 highway=secondary"));
GetDataRunnable.removeCommonTags(ds1);
Assert.assertEquals(1, ds1.allPrimitives().stream().mapToInt(prim -> prim.getKeys().size()).sum());
GetDataRunnable.removeCommonTags(ds1);
Assert.assertEquals(1, ds1.allPrimitives().stream().mapToInt(prim -> prim.getKeys().size()).sum());
}
/**
* Test method for {@link MergeDuplicateWays#filterDataSet(DataSet, List)}.
*/
@Test
public void testFilterDataSet() {
DataSet ds1 = new DataSet();
Way way1 = TestUtils.newWay("", new Node(new LatLon(0, 1)), new Node(new LatLon(1, 2)));
Way way2 = TestUtils.newWay("", new Node(new LatLon(1, 1)), new Node(new LatLon(1, 2)),
new Node(new LatLon(2, 2)));
way1.getNodes().forEach(ds1::addPrimitive);
way2.getNodes().forEach(ds1::addPrimitive);
ds1.addPrimitive(way1);
ds1.addPrimitive(way2);
new MergeDuplicateWays(ds1).executeCommand();
Assert.assertFalse(way1.isDeleted());
Assert.assertFalse(way2.isDeleted());
way1.getNodes().forEach(node -> Assert.assertFalse(way2.containsNode(node)));
way2.getNodes().forEach(node -> Assert.assertFalse(way1.containsNode(node)));
Node tNode = new Node(new LatLon(1, 1));
ds1.addPrimitive(tNode);
way1.addNode(1, tNode);
new MergeDuplicateWays(ds1).executeCommand();
Assert.assertNotSame(way1.isDeleted(), way2.isDeleted());
Way tWay = way1.isDeleted() ? way2 : way1;
Assert.assertEquals(4, tWay.getNodesCount());
}
/**
* Test method for {@link MergeDuplicateWays#mergeWays(Way, Way, Set)}.
*/
@Test
public void testMergeWays() {
Way way1 = TestUtils.newWay("", new Node(new LatLon(0, 0)), new Node(new LatLon(1, 1)));
Way way2 = TestUtils.newWay("", new Node(new LatLon(1, 1)), new Node(new LatLon(1, 2)));
Set<Pair<Pair<Integer, Node>, Pair<Integer, Node>>> set = new LinkedHashSet<>();
set.add(new Pair<>(new Pair<>(1, way1.lastNode()), new Pair<>(0, way2.firstNode())));
DataSet ds = new DataSet();
way1.getNodes().forEach(ds::addPrimitive);
way2.getNodes().forEach(ds::addPrimitive);
ds.addPrimitive(way2);
ds.addPrimitive(way1);
// Test with one node in common
Assert.assertNull(MergeDuplicateWays.mergeWays(way1, way2, set));
Assert.assertFalse(way2.isDeleted());
Assert.assertFalse(way1.isDeleted());
Assert.assertEquals(2, way1.getNodesCount());
Node tNode = new Node(new LatLon(0, 0));
ds.addPrimitive(tNode);
way2.addNode(0, tNode);
set.clear(); // we can't use the last pair added
set.add(new Pair<>(new Pair<>(0, way1.firstNode()), new Pair<>(0, way2.firstNode())));
set.add(new Pair<>(new Pair<>(1, way1.lastNode()), new Pair<>(1, way2.getNode(1))));
MergeDuplicateWays.mergeWays(way1, way2, set).executeCommand();
Assert.assertTrue(way2.isDeleted());
Assert.assertFalse(way1.isDeleted());
Assert.assertEquals(3, way1.getNodesCount());
way1 = TestUtils.newWay("", new Node(new LatLon(0, 0)), new Node(new LatLon(1, 1)));
way2 = TestUtils.newWay("", new Node(new LatLon(1, 1)), new Node(new LatLon(1, 2)));
way2.addNode(0, new Node(new LatLon(0, 0)));
ds = new DataSet();
way1.getNodes().forEach(ds::addPrimitive);
way2.getNodes().forEach(ds::addPrimitive);
ds.addPrimitive(way2);
ds.addPrimitive(way1);
List<Node> way2Nodes = way2.getNodes();
Collections.reverse(way2Nodes);
way2.setNodes(way2Nodes);
set.clear();
set.add(new Pair<>(new Pair<>(0, way1.firstNode()), new Pair<>(2, way2.lastNode())));
set.add(new Pair<>(new Pair<>(1, way1.lastNode()), new Pair<>(1, way2.getNode(1))));
MergeDuplicateWays.mergeWays(way1, way2, set).executeCommand();
Assert.assertTrue(way2.isDeleted());
Assert.assertFalse(way1.isDeleted());
Assert.assertEquals(3, way1.getNodesCount());
way1 = TestUtils.newWay("", new Node(new LatLon(0, 0)), new Node(new LatLon(1, 1)));
way2 = TestUtils.newWay("", new Node(new LatLon(1, 1)), new Node(new LatLon(1, 2)));
way2.addNode(0, new Node(new LatLon(0, 0)));
way2.addNode(0, new Node(new LatLon(-1, -1)));
ds = new DataSet();
way1.getNodes().forEach(ds::addPrimitive);
way2.getNodes().forEach(ds::addPrimitive);
ds.addPrimitive(way2);
ds.addPrimitive(way1);
set.clear();
set.add(new Pair<>(new Pair<>(0, way1.firstNode()), new Pair<>(2, way2.getNode(2))));
set.add(new Pair<>(new Pair<>(1, way1.lastNode()), new Pair<>(3, way2.getNode(3))));
List<Node> currentWay2Nodes = way2.getNodes();
MergeDuplicateWays.mergeWays(way1, way2, set).executeCommand();
Assert.assertTrue(way2.isDeleted());
Assert.assertFalse(way1.isDeleted());
Assert.assertEquals(4, way1.getNodesCount());
Assert.assertEquals(currentWay2Nodes.get(0), way1.firstNode());
Assert.assertEquals(currentWay2Nodes.get(1), way1.getNode(1));
}
/**
* Test method for {@link MergeDuplicateWays#checkDirection(Set)}.
*/
@Test
public void testCheckDirection() {
LinkedHashSet<Pair<Pair<Integer, Node>, Pair<Integer, Node>>> set = new LinkedHashSet<>();
Pair<Pair<Integer, Node>, Pair<Integer, Node>> pair1 = new Pair<>(new Pair<>(0, new Node(new LatLon(0, 0))),
new Pair<>(0, new Node(new LatLon(0, 0))));
Pair<Pair<Integer, Node>, Pair<Integer, Node>> pair2 = new Pair<>(new Pair<>(1, new Node(new LatLon(1, 0))),
new Pair<>(1, new Node(new LatLon(1, 0))));
set.add(pair1);
set.add(pair2);
Assert.assertTrue(MergeDuplicateWays.checkDirection(set));
pair1.a.a = pair1.a.a - 1;
Assert.assertTrue(MergeDuplicateWays.checkDirection(set));
pair1.a.a = pair1.a.a + 3;
Assert.assertFalse(MergeDuplicateWays.checkDirection(set));
pair1.a.a = pair1.a.a - 2;
Assert.assertTrue(MergeDuplicateWays.checkDirection(set));
pair1.b.a = pair1.b.a - 1;
Assert.assertTrue(MergeDuplicateWays.checkDirection(set));
pair1.b.a = pair1.b.a + 3;
Assert.assertFalse(MergeDuplicateWays.checkDirection(set));
pair1.b.a = pair1.b.a - 2;
}
/**
* Test method for {@link MergeDuplicateWays#sorted(List)}.
*/
@Test
public void testSorted() {
List<Integer> integerList = Arrays.asList(1, 2, 3, 4, 6, 7, 8, 9, 5);
Assert.assertFalse(MergeDuplicateWays.sorted(integerList));
integerList = integerList.stream().sorted().collect(Collectors.toList());
Assert.assertTrue(MergeDuplicateWays.sorted(integerList));
integerList.remove(3);
Assert.assertFalse(MergeDuplicateWays.sorted(integerList));
integerList = Arrays.asList(1);
Assert.assertTrue(MergeDuplicateWays.sorted(integerList));
}
/**
* Test method for {@link MergeDuplicateWays#getDuplicateNodes(Way, Way)}.
*/
@Test
public void testGetDuplicateNodes() {
Way way1 = TestUtils.newWay("", new Node(new LatLon(0, 0)), new Node(new LatLon(1, 1)));
Way way2 = TestUtils.newWay("", new Node(new LatLon(0, 0)), new Node(new LatLon(1, 1)));
Map<Pair<Integer, Node>, Map<Integer, Node>> duplicateNodes = MergeDuplicateWays.getDuplicateNodes(way1, way2);
Assert.assertEquals(2, duplicateNodes.size());
Assert.assertEquals(2, duplicateNodes.values().stream().flatMap(col -> col.keySet().stream()).count());
way2.addNode(new Node(new LatLon(0, 0)));
duplicateNodes = MergeDuplicateWays.getDuplicateNodes(way1, way2);
Assert.assertEquals(2, duplicateNodes.size());
Assert.assertEquals(3, duplicateNodes.values().stream().flatMap(col -> col.keySet().stream()).count());
way2.addNode(way2.firstNode());
duplicateNodes = MergeDuplicateWays.getDuplicateNodes(way1, way2);
Assert.assertEquals(2, duplicateNodes.size());
Assert.assertEquals(4, duplicateNodes.values().stream().flatMap(col -> col.keySet().stream()).count());
way2.setNodes(way2.getNodes().stream().limit(2).collect(Collectors.toList()));
way2.addNode(new Node(new LatLon(2, 2)));
duplicateNodes = MergeDuplicateWays.getDuplicateNodes(way1, way2);
Assert.assertEquals(2, duplicateNodes.size());
Assert.assertEquals(2, duplicateNodes.values().stream().flatMap(col -> col.keySet().stream()).count());
}
}