/*
 * Copyright 2006 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.common.geometry;

import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public abstract strictfp class S2EdgeIndex {
  /**
   * Thicken the edge in all directions by roughly 1% of the edge length when
   * thickenEdge is true.
   */
  private static final double THICKENING = 0.01;

  /**
   * Threshold for small angles, which helps lenientCrossing to determine
   * whether two edges are likely to intersect.
   */
  private static final double MAX_DET_ERROR = 1e-14;

  /**
   * The cell containing each edge, as given in the parallel array
   * <code>edges</code>.
   */
  private long[] cells;

  /**
   * The edge contained by each cell, as given in the parallel array
   * <code>cells</code>.
   */
  private int[] edges;

  /**
   * No cell strictly below this level appears in the mapping. Initially the
   * leaf level; this is the minimum level at which we will ever look for test
   * edges.
   */
  private int minimumS2LevelUsed;

  /**
   * Has the index been computed already?
   */
  private boolean indexComputed;

  /**
   * Number of queries so far.
   */
  private int queryCount;

  /**
   * Empties the index in case it already contained something.
   */
  public void reset() {
    minimumS2LevelUsed = S2CellId.MAX_LEVEL;
    indexComputed = false;
    queryCount = 0;
    cells = null;
    edges = null;
  }

  /**
   * Compares [cell1, edge1] to [cell2, edge2], by cell first and edge second.
   *
   * @return -1 if [cell1, edge1] is less than [cell2, edge2], 1 if [cell1,
   *         edge1] is greater than [cell2, edge2], 0 otherwise.
   */
  private static final int compare(long cell1, int edge1, long cell2, int edge2) {
    if (cell1 < cell2) {
      return -1;
    } else if (cell1 > cell2) {
      return 1;
    } else if (edge1 < edge2) {
      return -1;
    } else if (edge1 > edge2) {
      return 1;
    } else {
      return 0;
    }
  }
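  // Illustrative note (not part of the original source, cell ids made up):
  // after computeIndex() and sortIndex() below, the parallel arrays hold
  // (cell, edge) pairs in the lexicographic order defined by compare() above,
  // so all edges indexed under the same cell id form one contiguous run, e.g.
  //   cells = {0x11, 0x11, 0x24, 0x24, 0x24}
  //   edges = {   2,    7,    0,    2,    5}
  // getEdges() later exploits this by binary searching for the run boundaries.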
  /** Computes the index (if it has not been previously done). */
  public final void computeIndex() {
    if (indexComputed) {
      return;
    }
    List<Long> cellList = Lists.newArrayList();
    List<Integer> edgeList = Lists.newArrayList();
    for (int i = 0; i < getNumEdges(); ++i) {
      S2Point from = edgeFrom(i);
      S2Point to = edgeTo(i);
      ArrayList<S2CellId> cover = Lists.newArrayList();
      int level = getCovering(from, to, true, cover);
      minimumS2LevelUsed = Math.min(minimumS2LevelUsed, level);
      for (S2CellId cellId : cover) {
        cellList.add(cellId.id());
        edgeList.add(i);
      }
    }
    cells = new long[cellList.size()];
    edges = new int[edgeList.size()];
    for (int i = 0; i < cells.length; i++) {
      cells[i] = cellList.get(i);
      edges[i] = edgeList.get(i);
    }
    sortIndex();
    indexComputed = true;
  }

  /** Sorts the parallel <code>cells</code> and <code>edges</code> arrays. */
  private void sortIndex() {
    // Create an array of indices and sort based on the values in the parallel
    // arrays at each index.
    Integer[] indices = new Integer[cells.length];
    for (int i = 0; i < indices.length; i++) {
      indices[i] = i;
    }
    Arrays.sort(indices, new Comparator<Integer>() {
      @Override
      public int compare(Integer index1, Integer index2) {
        return S2EdgeIndex.compare(cells[index1], edges[index1], cells[index2], edges[index2]);
      }
    });
    // Copy the cells and edges in the order given by the sorted list of indices.
    long[] newCells = new long[cells.length];
    int[] newEdges = new int[edges.length];
    for (int i = 0; i < indices.length; i++) {
      newCells[i] = cells[indices[i]];
      newEdges[i] = edges[indices[i]];
    }
    // Replace the cells and edges with the sorted arrays.
    cells = newCells;
    edges = newEdges;
  }

  public final boolean isIndexComputed() {
    return indexComputed;
  }

  /**
   * Tell the index that we just received a new request for candidates. Useful
   * to compute when to switch to the quad tree.
   */
  protected final void incrementQueryCount() {
    ++queryCount;
  }
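  // Illustrative note (not part of the original source): with the constants
  // used in predictAdditionalCalls() below, an index holding more than 100
  // data edges keeps answering queries by brute force until 30 such queries
  // have been counted; with predictAdditionalCalls(1) per query (as
  // DataEdgeIterator does), the 31st query triggers computeIndex() and every
  // later query uses the quad tree.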
  /**
   * If the index hasn't been computed yet, looks at how much work has gone into
   * iterating using the brute force method, and how much more work is planned
   * as defined by 'cost'. If it would have been cheaper to use a quad tree from
   * the beginning, then compute it now. This guarantees that we will never use
   * more than twice the time we would have used had we known in advance exactly
   * how many edges we would have wanted to test. It is the theoretical best.
   *
   * The value 'n' is the number of iterators we expect to request from this
   * edge index.
   *
   * If we have m data edges and n query edges, then the brute force cost is
   * m * n * testCost, where testCost is taken to be the cost of
   * EdgeCrosser.robustCrossing, measured to be about 30ns at the time of this
   * writing.
   *
   * If we compute the index, the cost becomes:
   * m * costInsert + n * costFind(m)
   *
   * - costInsert can be expected to be reasonably stable, and was measured at
   * 1200ns with the BM_QuadEdgeInsertionCost benchmark.
   *
   * - costFind depends on the length of the edge. For m=1000 edges, we got
   * timings ranging from 1ms (edge the length of the polygon) to 40ms. The
   * latter is for very long query edges, and needs to be optimized. We will
   * assume for the rest of the discussion that costFind is roughly 3ms.
   *
   * When doing one additional query, the differential cost is
   * m * testCost - costFind(m). With the numbers above, it is better to use the
   * quad tree (if we have it) if m >= 100.
   *
   * If m = 100, 30 queries will give m*n*testCost = m * costInsert = 100ms,
   * while the marginal cost to find is 3ms. Thus, this is a reasonable thing to
   * do.
   */
  public final void predictAdditionalCalls(int n) {
    if (indexComputed) {
      return;
    }
    if (getNumEdges() > 100 && (queryCount + n) > 30) {
      computeIndex();
    }
  }

  /**
   * Override these functions to give access to the underlying data. The
   * function getNumEdges() returns the number of edges in the index, while
   * edgeFrom(index) and edgeTo(index) return the "from" and "to" endpoints of
   * the edge at the given index.
   */
  protected abstract int getNumEdges();

  protected abstract S2Point edgeFrom(int index);

  protected abstract S2Point edgeTo(int index);

  /**
   * Appends to "candidateCrossings" all edge references which may cross the
   * given edge. This is done by covering the edge and then finding all
   * references of edges whose coverings overlap this covering. Parent cells are
   * checked level by level. Child cells are checked all at once by taking
   * advantage of the natural ordering of S2CellIds.
   */
  protected void findCandidateCrossings(S2Point a, S2Point b, List<Integer> candidateCrossings) {
    Preconditions.checkState(indexComputed);
    ArrayList<S2CellId> cover = Lists.newArrayList();
    getCovering(a, b, false, cover);

    // Edge references are inserted into the map once for each covering cell, so
    // absorb duplicates here.
    Set<Integer> uniqueSet = new HashSet<Integer>();
    getEdgesInParentCells(cover, uniqueSet);

    // TODO(user): An important optimization for long query
    // edges (Contains queries): keep a bounding cap and clip the query
    // edge to the cap before starting the descent.
    getEdgesInChildrenCells(a, b, cover, uniqueSet);

    candidateCrossings.clear();
    candidateCrossings.addAll(uniqueSet);
  }

  /**
   * Returns the smallest cell containing all four points, or
   * {@link S2CellId#sentinel()} if they are not all on the same face. The
   * points don't need to be normalized.
   */
  private static S2CellId containingCell(S2Point pa, S2Point pb, S2Point pc, S2Point pd) {
    S2CellId a = S2CellId.fromPoint(pa);
    S2CellId b = S2CellId.fromPoint(pb);
    S2CellId c = S2CellId.fromPoint(pc);
    S2CellId d = S2CellId.fromPoint(pd);

    if (a.face() != b.face() || a.face() != c.face() || a.face() != d.face()) {
      return S2CellId.sentinel();
    }

    while (!a.equals(b) || !a.equals(c) || !a.equals(d)) {
      a = a.parent();
      b = b.parent();
      c = c.parent();
      d = d.parent();
    }
    return a;
  }
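  // Illustrative note (not part of the original source): the loop above walks
  // all ids up one level at a time until they agree. For instance, two leaf
  // cells that are siblings become equal after a single parent() step (their
  // common level-29 parent), while cells that only share a face converge at
  // the level-0 face cell after MAX_LEVEL steps.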
  /**
   * Returns the smallest cell containing both points, or
   * {@link S2CellId#sentinel()} if they are not on the same face. The points
   * don't need to be normalized.
   */
  private static S2CellId containingCell(S2Point pa, S2Point pb) {
    S2CellId a = S2CellId.fromPoint(pa);
    S2CellId b = S2CellId.fromPoint(pb);

    if (a.face() != b.face()) {
      return S2CellId.sentinel();
    }

    while (!a.equals(b)) {
      a = a.parent();
      b = b.parent();
    }
    return a;
  }

  /**
   * Computes a cell covering of an edge. Clears edgeCovering and returns the
   * level of the S2 cells used in the covering (only one level is ever used for
   * each call).
   *
   * If thickenEdge is true, the edge is thickened and extended by 1% of its
   * length.
   *
   * It is guaranteed that no child of a covering cell will fully contain the
   * covered edge.
   */
  private int getCovering(
      S2Point a, S2Point b, boolean thickenEdge, ArrayList<S2CellId> edgeCovering) {
    edgeCovering.clear();

    // Selects the ideal S2 level at which to cover the edge; this will be the
    // level whose S2 cells have a width roughly commensurate with the length of
    // the edge. We multiply the edge length by 2*THICKENING to guarantee the
    // thickening is honored (it's not a big deal if we honor it when we don't
    // request it) when doing the covering-by-cap trick.
    double edgeLength = a.angle(b);
    int idealLevel = S2Projections.MIN_WIDTH.getMaxLevel(edgeLength * (1 + 2 * THICKENING));

    S2CellId containingCellId;
    if (!thickenEdge) {
      containingCellId = containingCell(a, b);
    } else {
      if (idealLevel == S2CellId.MAX_LEVEL) {
        // If the edge is tiny, instabilities are more likely, so we
        // want to limit the number of operations.
        // We pretend we are in a cell much larger so as to trigger the
        // 'needs covering' case, so we won't try to thicken the edge.
        containingCellId = (new S2CellId(0xFFF0)).parent(3);
      } else {
        S2Point pq = S2Point.mul(S2Point.minus(b, a), THICKENING);
        S2Point ortho =
            S2Point.mul(S2Point.normalize(S2Point.crossProd(pq, a)), edgeLength * THICKENING);
        S2Point p = S2Point.minus(a, pq);
        S2Point q = S2Point.add(b, pq);
        // If p and q were antipodal, the edge wouldn't be lengthened,
        // and it could even flip! This is not a problem because
        // idealLevel != 0 here. The farthest p and q can be is roughly
        // a quarter of the Earth away from each other, so we remain
        // Theta(THICKENING).
        containingCellId =
            containingCell(S2Point.minus(p, ortho), S2Point.add(p, ortho), S2Point.minus(q, ortho),
                S2Point.add(q, ortho));
      }
    }

    // Best case: edge is fully contained in a cell that's not too big.
    if (!containingCellId.equals(S2CellId.sentinel())
        && containingCellId.level() >= idealLevel - 2) {
      edgeCovering.add(containingCellId);
      return containingCellId.level();
    }

    if (idealLevel == 0) {
      // Edge is very long, maybe even longer than a face width, so the
      // trick below doesn't work. For now, we will add the whole S2 sphere.
      // TODO(user): Do something a tad smarter (and beware of the
      // antipodal case).
      for (S2CellId cellid = S2CellId.begin(0); !cellid.equals(S2CellId.end(0));
          cellid = cellid.next()) {
        edgeCovering.add(cellid);
      }
      return 0;
    }
    // TODO(user): Check that the trick below works even when the vertex is
    // at the interface between three faces.

    // Use trick as in S2PolygonBuilder.PointIndex.findNearbyPoint:
    // Cover the edge by a cap centered at the edge midpoint, then cover
    // the cap by four big-enough cells around the cell vertex closest to the
    // cap center.
    S2Point middle = S2Point.normalize(S2Point.div(S2Point.add(a, b), 2));
    int actualLevel = Math.min(idealLevel, S2CellId.MAX_LEVEL - 1);
    S2CellId.fromPoint(middle).getVertexNeighbors(actualLevel, edgeCovering);
    return actualLevel;
  }
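  // Illustrative note (not part of the original source): the guarantee stated
  // in the getCovering() javadoc -- no child of a covering cell fully contains
  // the covered edge -- is what getEdgesInChildrenCells() relies on: once a
  // covering cell is subdivided there, it is enough to test the edge against
  // each child's boundary, since the edge can never lie strictly inside one
  // child.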
  /**
   * Filters a list of entries down to the inclusive range defined by the given
   * cells, in <code>O(log N)</code> time.
   *
   * @param cell1 One side of the inclusive query range.
   * @param cell2 The other side of the inclusive query range.
   * @return An array of length 2, containing the start/end indices.
   */
  private int[] getEdges(long cell1, long cell2) {
    // Ensure cell1 <= cell2.
    if (cell1 > cell2) {
      long temp = cell1;
      cell1 = cell2;
      cell2 = temp;
    }
    // The binary search returns -N-1 to indicate an insertion point at index N,
    // if an exact match cannot be found. Since the edge indices queried for are
    // not valid edge indices, we will always get -N-1, so we immediately
    // convert to N.
    return new int[]{
        -1 - binarySearch(cell1, Integer.MIN_VALUE),
        -1 - binarySearch(cell2, Integer.MAX_VALUE)};
  }

  private int binarySearch(long cell, int edge) {
    int low = 0;
    int high = cells.length - 1;
    while (low <= high) {
      int mid = (low + high) >>> 1;
      int cmp = compare(cells[mid], edges[mid], cell, edge);
      if (cmp < 0) {
        low = mid + 1;
      } else if (cmp > 0) {
        high = mid - 1;
      } else {
        return mid;
      }
    }
    return -(low + 1);
  }

  /**
   * Adds to candidateCrossings all the edges present in any ancestor of any
   * cell of cover, down to minimumS2LevelUsed. The cell->edge mapping is stored
   * in the parallel <code>cells</code> and <code>edges</code> arrays.
   */
  private void getEdgesInParentCells(List<S2CellId> cover, Set<Integer> candidateCrossings) {
    // Find all parent cells of covering cells.
    Set<S2CellId> parentCells = Sets.newHashSet();
    for (S2CellId coverCell : cover) {
      for (int parentLevel = coverCell.level() - 1; parentLevel >= minimumS2LevelUsed;
          --parentLevel) {
        if (!parentCells.add(coverCell.parent(parentLevel))) {
          break; // cell is already in => parents are too.
        }
      }
    }

    // Put parent cell edge references into result.
    for (S2CellId parentCell : parentCells) {
      int[] bounds = getEdges(parentCell.id(), parentCell.id());
      for (int i = bounds[0]; i < bounds[1]; i++) {
        candidateCrossings.add(edges[i]);
      }
    }
  }
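  // Illustrative note (not part of the original source): in lenientCrossing()
  // below, each quantity such as acb = crossProd(a, c).dotProd(b) is a scalar
  // triple product, i.e. the determinant det(a, c, b); its sign tells on which
  // side of the great circle through a and c the point b lies. Any determinant
  // whose magnitude falls below MAX_DET_ERROR is treated as "could be either
  // side", so the test conservatively reports a possible crossing.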
  /**
   * Returns true if ab possibly crosses cd, by clipping tiny angles to zero.
   */
  private static boolean lenientCrossing(S2Point a, S2Point b, S2Point c, S2Point d) {
    // assert (S2.isUnitLength(a));
    // assert (S2.isUnitLength(b));
    // assert (S2.isUnitLength(c));

    double acb = S2Point.crossProd(a, c).dotProd(b);
    double bda = S2Point.crossProd(b, d).dotProd(a);
    if (Math.abs(acb) < MAX_DET_ERROR || Math.abs(bda) < MAX_DET_ERROR) {
      return true;
    }
    if (acb * bda < 0) {
      return false;
    }
    double cbd = S2Point.crossProd(c, b).dotProd(d);
    double dac = S2Point.crossProd(d, a).dotProd(c);
    if (Math.abs(cbd) < MAX_DET_ERROR || Math.abs(dac) < MAX_DET_ERROR) {
      return true;
    }
    return (acb * cbd >= 0) && (acb * dac >= 0);
  }

  /**
   * Returns true if the edge and the cell (including its boundary) intersect.
   */
  private static boolean edgeIntersectsCellBoundary(S2Point a, S2Point b, S2Cell cell) {
    S2Point[] vertices = new S2Point[4];
    for (int i = 0; i < 4; ++i) {
      vertices[i] = cell.getVertex(i);
    }
    for (int i = 0; i < 4; ++i) {
      S2Point fromPoint = vertices[i];
      S2Point toPoint = vertices[(i + 1) % 4];
      if (lenientCrossing(a, b, fromPoint, toPoint)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Appends to candidateCrossings the edges that are fully contained in an S2
   * covering of edge. The covering of edge used is initially cover, but is
   * refined to quickly eliminate subcells that contain many edges but do not
   * intersect with edge.
   */
  private void getEdgesInChildrenCells(S2Point a, S2Point b, List<S2CellId> cover,
      Set<Integer> candidateCrossings) {
    // Put all edge references of (covering cells + descendant cells) into
    // result. This relies on the natural ordering of S2CellIds.
    S2Cell[] children = null;
    while (!cover.isEmpty()) {
      S2CellId cell = cover.remove(cover.size() - 1);
      int[] bounds = getEdges(cell.rangeMin().id(), cell.rangeMax().id());
      if (bounds[1] - bounds[0] <= 16) {
        for (int i = bounds[0]; i < bounds[1]; i++) {
          candidateCrossings.add(edges[i]);
        }
      } else {
        // Add cells at this level.
        bounds = getEdges(cell.id(), cell.id());
        for (int i = bounds[0]; i < bounds[1]; i++) {
          candidateCrossings.add(edges[i]);
        }
        // Recurse on the children -- hopefully some will be empty.
        if (children == null) {
          children = new S2Cell[4];
          for (int i = 0; i < 4; ++i) {
            children[i] = new S2Cell();
          }
        }
        new S2Cell(cell).subdivide(children);
        for (S2Cell child : children) {
          // TODO(user): Do the check for the four cells at once,
          // as it is enough to check the four edges between the cells. At
          // this time, we are checking 16 edges, 4 times too many.
          //
          // Note that given the guarantee of AppendCovering, it is enough
          // to check that the edge intersects with the cell boundary, as it
          // cannot be fully contained in a cell.
          if (edgeIntersectsCellBoundary(a, b, child)) {
            cover.add(child.id());
          }
        }
      }
    }
  }
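  // Illustrative sketch only (not part of the library): a minimal concrete
  // subclass backed by parallel endpoint arrays might look like the following.
  // The class and field names here are hypothetical.
  //
  //   class SimpleEdgeIndex extends S2EdgeIndex {
  //     private final S2Point[] froms;
  //     private final S2Point[] tos;
  //
  //     SimpleEdgeIndex(S2Point[] froms, S2Point[] tos) {
  //       this.froms = froms;
  //       this.tos = tos;
  //     }
  //
  //     @Override protected int getNumEdges() { return froms.length; }
  //     @Override protected S2Point edgeFrom(int index) { return froms[index]; }
  //     @Override protected S2Point edgeTo(int index) { return tos[index]; }
  //   }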
  /*
   * An iterator on data edges that may cross a query edge (a,b). Create the
   * iterator, call getCandidates(), then hasNext()/next() repeatedly.
   *
   * The current edge in the iteration has index index(); its endpoints can be
   * obtained from the underlying S2EdgeIndex via edgeFrom(index()) and
   * edgeTo(index()).
   */
  public static class DataEdgeIterator {
    /**
     * The structure containing the data edges.
     */
    private final S2EdgeIndex edgeIndex;

    /**
     * Tells whether getCandidates() obtained the candidates through brute force
     * iteration or using the quad tree structure.
     */
    private boolean isBruteForce;

    /**
     * Index of the current edge in the iteration; updated by next().
     */
    private int currentIndex;

    /**
     * Cache of edgeIndex.getNumEdges() so that hasNext() doesn't make an extra
     * call.
     */
    private int numEdges;

    /**
     * All the candidates obtained by getCandidates() when we are using a
     * quad-tree (i.e. isBruteForce = false).
     */
    ArrayList<Integer> candidates;

    /**
     * Index within the array above. We have:
     * currentIndex = candidates.get(currentIndexInCandidates).
     */
    private int currentIndexInCandidates;

    public DataEdgeIterator(S2EdgeIndex edgeIndex) {
      this.edgeIndex = edgeIndex;
      candidates = Lists.newArrayList();
    }

    /**
     * Initializes the iterator to iterate over a set of candidates that may
     * cross the edge (a,b).
     */
    public void getCandidates(S2Point a, S2Point b) {
      edgeIndex.predictAdditionalCalls(1);
      isBruteForce = !edgeIndex.isIndexComputed();
      if (isBruteForce) {
        edgeIndex.incrementQueryCount();
        currentIndex = 0;
        numEdges = edgeIndex.getNumEdges();
      } else {
        candidates.clear();
        edgeIndex.findCandidateCrossings(a, b, candidates);
        currentIndexInCandidates = 0;
        if (!candidates.isEmpty()) {
          currentIndex = candidates.get(0);
        }
      }
    }

    /**
     * Index of the current edge in the iteration.
     */
    public int index() {
      Preconditions.checkState(hasNext());
      return currentIndex;
    }

    /**
     * False if there are no more candidates; true otherwise.
     */
    public boolean hasNext() {
      if (isBruteForce) {
        return (currentIndex < numEdges);
      } else {
        return currentIndexInCandidates < candidates.size();
      }
    }

    /**
     * Iterate to the next available candidate.
     */
    public void next() {
      Preconditions.checkState(hasNext());
      if (isBruteForce) {
        ++currentIndex;
      } else {
        ++currentIndexInCandidates;
        if (currentIndexInCandidates < candidates.size()) {
          currentIndex = candidates.get(currentIndexInCandidates);
        }
      }
    }
  }
}
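// Illustrative usage sketch (not part of the library), assuming the
// hypothetical SimpleEdgeIndex subclass sketched earlier and caller-supplied
// arrays froms/tos and query points a/b: iterate over candidate data edges
// that may cross the query edge (a, b), then run an exact crossing test on
// each candidate (for example with S2EdgeUtil.EdgeCrosser).
//
//   S2EdgeIndex index = new SimpleEdgeIndex(froms, tos);
//   S2EdgeIndex.DataEdgeIterator it = new S2EdgeIndex.DataEdgeIterator(index);
//   it.getCandidates(a, b);
//   for (; it.hasNext(); it.next()) {
//     int e = it.index();
//     // Exact crossing test of (a, b) against (froms[e], tos[e]) goes here.
//   }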