// License: GPL. For details, see LICENSE file.
package org.openstreetmap.josm.data.validation.tests;

import static org.openstreetmap.josm.data.validation.tests.CrossingWays.HIGHWAY;
import static org.openstreetmap.josm.data.validation.tests.CrossingWays.RAILWAY;
import static org.openstreetmap.josm.data.validation.tests.CrossingWays.WATERWAY;
import static org.openstreetmap.josm.tools.I18n.tr;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.stream.Collectors;

import org.openstreetmap.josm.actions.MergeNodesAction;
import org.openstreetmap.josm.command.Command;
import org.openstreetmap.josm.data.coor.LatLon;
import org.openstreetmap.josm.data.osm.Hash;
import org.openstreetmap.josm.data.osm.Node;
import org.openstreetmap.josm.data.osm.OsmPrimitive;
import org.openstreetmap.josm.data.osm.OsmPrimitiveType;
import org.openstreetmap.josm.data.osm.Storage;
import org.openstreetmap.josm.data.osm.Way;
import org.openstreetmap.josm.data.validation.Severity;
import org.openstreetmap.josm.data.validation.Test;
import org.openstreetmap.josm.data.validation.TestError;
import org.openstreetmap.josm.gui.progress.ProgressMonitor;
import org.openstreetmap.josm.spi.preferences.Config;
import org.openstreetmap.josm.tools.MultiMap;

/**
 * Tests if there are duplicate nodes
 *
 * @author frsantos
 */
public class DuplicateNode extends Test {

    private static class NodeHash implements Hash<Object, Object> {

        private final double precision = Config.getPref().getDouble("validator.duplicatenodes.precision", 0.);

        private LatLon roundCoord(LatLon coor) {
            return new LatLon(
                    Math.round(coor.lat() / precision) * precision,
                    Math.round(coor.lon() / precision) * precision
                    );
        }

        @SuppressWarnings("unchecked")
        private LatLon getLatLon(Object o) {
            if (o instanceof Node) {
                LatLon coor = ((Node) o).getCoor();
                if (coor == null)
                    return null;
                if (precision == 0)
                    return coor.getRoundedToOsmPrecision();
                return roundCoord(coor);
            } else if (o instanceof List<?>) {
                LatLon coor = ((List<Node>) o).get(0).getCoor();
                if (coor == null)
                    return null;
                if (precision == 0)
                    return coor.getRoundedToOsmPrecision();
                return roundCoord(coor);
            } else
                throw new AssertionError();
        }

        @Override
        public boolean equals(Object k, Object t) {
            LatLon coorK = getLatLon(k);
            LatLon coorT = getLatLon(t);
            return coorK == coorT || (coorK != null && coorT != null && coorK.equals(coorT));
        }

        @Override
        public int getHashCode(Object k) {
            LatLon coorK = getLatLon(k);
            return coorK == null ? 0 : coorK.hashCode();
        }
    }
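
    // Illustration (a sketch added for clarity, not part of the original code): with the default
    // precision of 0, coordinates are compared after rounding to OSM precision (7 decimal places),
    // so the two hypothetical nodes below would land in the same bucket of the Storage:
    //
    //   Node a = new Node(new LatLon(52.51234567, 13.41234567)); // rounds to 52.5123457, 13.4123457
    //   Node b = new Node(new LatLon(52.51234572, 13.41234568)); // rounds to 52.5123457, 13.4123457
    //
    // With a non-zero "validator.duplicatenodes.precision" p, latitude and longitude are instead
    // snapped to a grid of spacing p before comparison.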

    protected static final int DUPLICATE_NODE = 1;
    protected static final int DUPLICATE_NODE_MIXED = 2;
    protected static final int DUPLICATE_NODE_OTHER = 3;
    protected static final int DUPLICATE_NODE_BUILDING = 10;
    protected static final int DUPLICATE_NODE_BOUNDARY = 11;
    protected static final int DUPLICATE_NODE_HIGHWAY = 12;
    protected static final int DUPLICATE_NODE_LANDUSE = 13;
    protected static final int DUPLICATE_NODE_NATURAL = 14;
    protected static final int DUPLICATE_NODE_POWER = 15;
    protected static final int DUPLICATE_NODE_RAILWAY = 16;
    protected static final int DUPLICATE_NODE_WATERWAY = 17;

    private static final String[] TYPES = {
            "none", HIGHWAY, RAILWAY, WATERWAY, "boundary", "power", "natural", "landuse", "building"};

    /** The map of potential duplicates.
     *
     * If there is exactly one node for a given pos, the map includes a pair {@code <pos, Node>}.
     * If there are multiple nodes for a given pos, the map includes a pair
     * {@code <pos, List<Node>>}.
     */
    private Storage<Object> potentialDuplicates;

    /**
     * Constructor
     */
    public DuplicateNode() {
        super(tr("Duplicated nodes"),
                tr("This test checks that there are no nodes at the very same location."));
    }

    @Override
    public void startTest(ProgressMonitor monitor) {
        super.startTest(monitor);
        potentialDuplicates = new Storage<>(new NodeHash());
    }

    @SuppressWarnings("unchecked")
    @Override
    public void endTest() {
        for (Object v: potentialDuplicates) {
            if (v instanceof Node) {
                // just one node at this position. Nothing to report as error
                continue;
            }

            // multiple nodes at the same position -> check if all nodes have a distinct elevation
            List<Node> nodes = (List<Node>) v;
            Set<String> eles = new HashSet<>();
            for (Node n : nodes) {
                String ele = n.get("ele");
                if (ele != null) {
                    eles.add(ele);
                }
            }
            if (eles.size() == nodes.size()) {
                // All nodes at this position have a distinct elevation.
                // This is normal in some particular cases (for example, geodesic points in France).
                // Do not report this as an error.
                continue;
            }

            // report errors
            errors.addAll(buildTestErrors(this, nodes));
        }
        super.endTest();
        potentialDuplicates = null;
    }
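
    // Example of the elevation exception above (added illustration, values are hypothetical): three
    // nodes sharing one position with ele=100, ele=200 and ele=300 are all distinct and are not
    // reported. If two of them shared the same "ele" value, or one of them had no "ele" at all,
    // eles.size() would be smaller than nodes.size() and the group would be reported as duplicates.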

    /**
     * Returns the list of "duplicate nodes" errors for the given selection of nodes and parent test
     * @param parentTest The parent test of returned errors
     * @param nodes The node selection to look into
     * @return the list of "duplicate nodes" errors
     */
    public List<TestError> buildTestErrors(Test parentTest, List<Node> nodes) {
        List<TestError> errors = new ArrayList<>();

        MultiMap<Map<String, String>, OsmPrimitive> mm = new MultiMap<>();
        for (Node n: nodes) {
            mm.put(n.getKeys(), n);
        }

        Map<String, Boolean> typeMap = new HashMap<>();

        // check whether we have multiple nodes at the same position with the same tag set
        for (Iterator<Map<String, String>> it = mm.keySet().iterator(); it.hasNext();) {
            Set<OsmPrimitive> primitives = mm.get(it.next());
            if (primitives.size() > 1) {

                for (String type: TYPES) {
                    typeMap.put(type, Boolean.FALSE);
                }

                for (OsmPrimitive p : primitives) {
                    if (p.getType() == OsmPrimitiveType.NODE) {
                        Node n = (Node) p;
                        List<OsmPrimitive> lp = n.getReferrers();
                        for (OsmPrimitive sp: lp) {
                            if (sp.getType() == OsmPrimitiveType.WAY) {
                                boolean typed = false;
                                Way w = (Way) sp;
                                Map<String, String> keys = w.getKeys();
                                for (String type: typeMap.keySet()) {
                                    if (keys.containsKey(type)) {
                                        typeMap.put(type, Boolean.TRUE);
                                        typed = true;
                                    }
                                }
                                if (!typed) {
                                    typeMap.put("none", Boolean.TRUE);
                                }
                            }
                        }
                    }
                }

                long nbType = typeMap.entrySet().stream().filter(Entry::getValue).count();

                if (nbType > 1) {
                    errors.add(TestError.builder(parentTest, Severity.WARNING, DUPLICATE_NODE_MIXED)
                            .message(tr("Mixed type duplicated nodes"))
                            .primitives(primitives)
                            .build());
                } else if (typeMap.get(HIGHWAY)) {
                    errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_HIGHWAY)
                            .message(tr("Highway duplicated nodes"))
                            .primitives(primitives)
                            .build());
                } else if (typeMap.get(RAILWAY)) {
                    errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_RAILWAY)
                            .message(tr("Railway duplicated nodes"))
                            .primitives(primitives)
                            .build());
                } else if (typeMap.get(WATERWAY)) {
                    errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_WATERWAY)
                            .message(tr("Waterway duplicated nodes"))
                            .primitives(primitives)
                            .build());
                } else if (typeMap.get("boundary")) {
                    errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_BOUNDARY)
                            .message(tr("Boundary duplicated nodes"))
                            .primitives(primitives)
                            .build());
                } else if (typeMap.get("power")) {
                    errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_POWER)
                            .message(tr("Power duplicated nodes"))
                            .primitives(primitives)
                            .build());
                } else if (typeMap.get("natural")) {
                    errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_NATURAL)
                            .message(tr("Natural duplicated nodes"))
                            .primitives(primitives)
                            .build());
                } else if (typeMap.get("building")) {
                    errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_BUILDING)
                            .message(tr("Building duplicated nodes"))
                            .primitives(primitives)
                            .build());
                } else if (typeMap.get("landuse")) {
                    errors.add(TestError.builder(parentTest, Severity.ERROR, DUPLICATE_NODE_LANDUSE)
                            .message(tr("Landuse duplicated nodes"))
                            .primitives(primitives)
                            .build());
                } else {
                    errors.add(TestError.builder(parentTest, Severity.WARNING, DUPLICATE_NODE_OTHER)
                            .message(tr("Other duplicated nodes"))
                            .primitives(primitives)
                            .build());
                }
                it.remove();
            }
        }

        // check whether we have multiple nodes at the same position with differing tag sets
        if (!mm.isEmpty()) {
            List<OsmPrimitive> duplicates = new ArrayList<>();
            for (Set<OsmPrimitive> l: mm.values()) {
                duplicates.addAll(l);
            }
            if (duplicates.size() > 1) {
                errors.add(TestError.builder(parentTest, Severity.WARNING, DUPLICATE_NODE)
                        .message(tr("Nodes at same position"))
                        .primitives(duplicates)
                        .build());
            }
        }
        return errors;
    }
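
    // Classification sketch (added comment, not from the original source): when duplicated nodes with
    // identical tag sets are referenced only by highway ways, the group is reported as
    // DUPLICATE_NODE_HIGHWAY with Severity.ERROR; when the referrers span several categories (for
    // example one highway way and one building way), the group is reported as DUPLICATE_NODE_MIXED
    // with Severity.WARNING. Nodes at the same position whose tag sets differ only receive the generic
    // DUPLICATE_NODE warning, which isFixable() below refuses to auto-merge.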

    @SuppressWarnings("unchecked")
    @Override
    public void visit(Node n) {
        if (n.isUsable()) {
            if (potentialDuplicates.get(n) == null) {
                // in most cases there is just one node at a given position. We
                // avoid creating an extra object and simply remember the node
                // itself at this position
                potentialDuplicates.put(n);
            } else if (potentialDuplicates.get(n) instanceof Node) {
                // we have an additional node at the same position. Create an extra
                // object to keep track of the nodes at this position.
                //
                Node n1 = (Node) potentialDuplicates.get(n);
                List<Node> nodes = new ArrayList<>(2);
                nodes.add(n1);
                nodes.add(n);
                potentialDuplicates.put(nodes);
            } else if (potentialDuplicates.get(n) instanceof List<?>) {
                // we have multiple nodes at the same position.
                //
                List<Node> nodes = (List<Node>) potentialDuplicates.get(n);
                nodes.add(n);
            }
        }
    }

    /**
     * Merge the nodes into one.
     * Copied from UtilsPlugin.MergePointsAction
     */
    @Override
    public Command fixError(TestError testError) {
        final Set<Node> nodes = testError.getPrimitives().stream()
                .filter(Node.class::isInstance)
                .map(Node.class::cast)
                // Filter nodes that have already been deleted (see #5764 and #5773)
                .filter(n -> !n.isDeleted())
                .collect(Collectors.toCollection(LinkedHashSet::new));

        // Merge only if at least 2 nodes remain
        if (nodes.size() >= 2) {
            // Use first existing node or first node if all nodes are new
            Node target = null;
            for (Node n: nodes) {
                if (!n.isNew()) {
                    target = n;
                    break;
                }
            }
            if (target == null) {
                target = nodes.iterator().next();
            }

            if (Command.checkOutlyingOrIncompleteOperation(nodes, Collections.singleton(target)) == Command.IS_OK)
                return MergeNodesAction.mergeNodes(nodes, target);
        }

        return null; // undoRedo handling done in mergeNodes
    }

    @Override
    public boolean isFixable(TestError testError) {
        if (!(testError.getTester() instanceof DuplicateNode)) return false;
        // never merge nodes with different tags.
        if (testError.getCode() == DUPLICATE_NODE) return false;
        // cannot merge nodes outside download area
        final Iterator<? extends OsmPrimitive> it = testError.getPrimitives().iterator();
        return it.hasNext() && !it.next().isOutsideDownloadArea();
        // everything else is ok to merge
    }
}
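
// Usage sketch (an assumption about how the validator framework typically drives this test; the
// dataSet variable is hypothetical and not part of the original file):
//
//   DuplicateNode test = new DuplicateNode();
//   test.startTest(NullProgressMonitor.INSTANCE);
//   for (Node n : dataSet.getNodes()) {
//       test.visit(n);
//   }
//   test.endTest();
//   List<TestError> problems = test.getErrors(); // getErrors() is inherited from Test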