diff options
author | Michael Bolin <bolinfest@google.com> | 2011-09-22 15:24:39 -0400 |
---|---|---|
committer | Michael Bolin <bolinfest@google.com> | 2011-09-22 15:24:39 -0400 |
commit | 6fb465764728bfc0c2dab23dd61f31784c0b9b57 (patch) | |
tree | 89eab677b8febf769f9fbee2cb92c072846045cd | |
download | s2-geometry-library-java-6fb465764728bfc0c2dab23dd61f31784c0b9b57.tar.gz |
Initial import.
53 files changed, 17134 insertions, 0 deletions
diff --git a/.classpath b/.classpath new file mode 100644 index 0000000..1dd34df --- /dev/null +++ b/.classpath @@ -0,0 +1,10 @@ +<?xml version="1.0" encoding="UTF-8"?> +<classpath> + <classpathentry kind="src" path="src"/> + <classpathentry kind="src" path="tests"/> + <classpathentry kind="lib" path="lib/guava-r09.jar"/> + <classpathentry kind="lib" path="lib/jsr305.jar"/> + <classpathentry kind="lib" path="lib/junit.jar"/> + <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/> + <classpathentry kind="output" path="build/eclipse/classes"/> +</classpath> diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..30023e6 --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ + +build/ diff --git a/.project b/.project new file mode 100644 index 0000000..886a9a9 --- /dev/null +++ b/.project @@ -0,0 +1,17 @@ +<?xml version="1.0" encoding="UTF-8"?> +<projectDescription> + <name>s2-geometry-library-java</name> + <comment></comment> + <projects> + </projects> + <buildSpec> + <buildCommand> + <name>org.eclipse.jdt.core.javabuilder</name> + <arguments> + </arguments> + </buildCommand> + </buildSpec> + <natures> + <nature>org.eclipse.jdt.core.javanature</nature> + </natures> +</projectDescription> @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/build.xml b/build.xml new file mode 100644 index 0000000..ebd834b --- /dev/null +++ b/build.xml @@ -0,0 +1,95 @@ +<project name="s2-geometry-java" default="compile"> + + <property name="src.dir" value="${basedir}/src" /> + <property name="tests.dir" value="${basedir}/tests" /> + <property name="lib.dir" value="${basedir}/lib" /> + <property name="build.dir" value="${basedir}/build" /> + <property name="classes.dir" value="${build.dir}/classes" /> + <property name="project-jarfile" + value="${build.dir}/${ant.project.name}.jar" /> + <property name="testClasses.dir" value="${build.dir}/test" /> + + <path id="classpath.path"> + <fileset dir="${lib.dir}"> + <include name="*.jar" /> + </fileset> + </path> + + <target name="clean" + description="removes all generated files"> + <delete dir="${build.dir}" /> + </target> + + <target name="compile" + description="compiles Java files for the s2 library"> + <mkdir dir="${classes.dir}" /> + <javac srcdir="${src.dir}" + destdir="${classes.dir}" + includeAntRuntime="false"> + <classpath refid="classpath.path" /> + </javac> + </target> + + <target name="jar" + depends="compile" + description="packages the class files as a jar"> + <jar destfile="${project-jarfile}" update="true"> + <fileset dir="${classes.dir}" /> + </jar> + </target> + + <target name="compile-tests" + depends="compile" + description="compile the JUnit tests"> + <mkdir 
dir="${testClasses.dir}" /> + <javac srcdir="${tests.dir}" + destdir="${testClasses.dir}" + > + <classpath refid="classpath.path" /> + <classpath> + <pathelement location="${classes.dir}" /> + </classpath> + </javac> + </target> + + <macrodef name="testing"> + <attribute name="printsummary" default="off" /> + <attribute name="fork" default="off" /> + <attribute name="forkmode" default="perTest" /> + <sequential> + <antcall target="compile-tests" /> + <junit printsummary="@{printsummary}" + fork="@{fork}" + forkmode="@{forkmode}" + showoutput="true"> + <classpath refid="classpath.path" /> + <classpath> + <pathelement location="${classes.dir}" /> + <pathelement location="${testClasses.dir}" /> + </classpath> + <formatter type="plain" usefile="false" /> + <batchtest haltonfailure="true"> + <fileset dir="${testClasses.dir}"> + <include name="**/*Test.class" /> + </fileset> + </batchtest> + </junit> + </sequential> + </macrodef> + + <target name="test" + description="runs all of the tests"> + <testing printsummary="on" fork="on" forkmode="once" /> + </target> + + <target name="test-forkless" + description="runs all of the tests without forking the process"> + <testing /> + </target> + + <target name="all" + depends="compile,jar,compile-tests,test" + description="build all deliverables for the project" + /> + +</project> diff --git a/lib/guava-r09.jar b/lib/guava-r09.jar Binary files differnew file mode 100644 index 0000000..f8da8b1 --- /dev/null +++ b/lib/guava-r09.jar diff --git a/lib/jsr305.jar b/lib/jsr305.jar Binary files differnew file mode 100644 index 0000000..cf5f561 --- /dev/null +++ b/lib/jsr305.jar diff --git a/lib/junit.jar b/lib/junit.jar Binary files differnew file mode 100644 index 0000000..f28b4ef --- /dev/null +++ b/lib/junit.jar diff --git a/src/com/google/common/geometry/DoubleMath.java b/src/com/google/common/geometry/DoubleMath.java new file mode 100644 index 0000000..8d64b06 --- /dev/null +++ b/src/com/google/common/geometry/DoubleMath.java @@ -0,0 
+1,78 @@ +/* + * Copyright 2005 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.common.geometry; + +/** + * Defines the Java equivalent of a couple of advanced floating point functions + * that are available in C. + * + */ +strictfp class DoubleMath { + /** Number of significant digits in a double */ + private static final int DIGITS = 52; + + /** A class that represents a double and a magnitude */ + public static class MantissaExponent implements Comparable<MantissaExponent> { + public double mantissa; + public int exp; + + /** No instantiation allowed */ + private MantissaExponent() { + } + + @Override + public int compareTo(MantissaExponent dm) { + return Math.signum(mantissa) * exp < Math.signum(dm.mantissa) * dm.exp ? -1 : + Math.signum(mantissa) * exp > Math.signum(dm.mantissa) * dm.exp ? 1 : mantissa + < dm.mantissa ? -1 : mantissa > dm.mantissa ? 1 : 0; + } + } + + /** + * If v is non-zero return an integer exp, so that (0.5 <= |v|*2^(-exp) < 1). + * this is analogous to the integer part of the return value frexp If v is + * zero return 0. + */ + public static int getExp(double v) { + if (v == 0) { + return 0; + } + return (int) ((0x7ff0000000000000L & Double.doubleToLongBits(v)) >> DIGITS) - 1022; + } + + + /** + * As in C++'s <code>double frexp ( double x , int * exp )</code> from math.h, + * this function separates the mantissa and exponent of a floating-point + * value. 
+ * + * This code certainly does not handle java's non-numerical values (NaN and + * the like). + */ + public static MantissaExponent frexp(double v) { + MantissaExponent dm = new MantissaExponent(); + if (v == 0) { + dm.mantissa = 0; + dm.exp = 0; + return dm; + } + long bits = Double.doubleToLongBits(v); + dm.mantissa = Double.longBitsToDouble((0x800fffffffffffffL & bits) | 0x3fe0000000000000L); + dm.exp = (int) ((0x7ff0000000000000L & bits) >> DIGITS) - 1022; + return dm; + } + +} diff --git a/src/com/google/common/geometry/MutableInteger.java b/src/com/google/common/geometry/MutableInteger.java new file mode 100644 index 0000000..c00b0c6 --- /dev/null +++ b/src/com/google/common/geometry/MutableInteger.java @@ -0,0 +1,84 @@ +/* + * Copyright 2005 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.common.geometry; + +/** + * Like an Integer, but mutable :) + * + * Sometimes it is just really convenient to be able to pass a MutableInteger + * as a parameter to a function, or for synchronization purposes (so that you + * can guard access to an int value without creating a separate Object just to + * syncrhonize on). 
+ * + * NOT thread-safe + * + */ +public class MutableInteger { + + private int value; + private Integer cachedIntegerValue = null; + + public MutableInteger(final int i) { + value = i; + } + + public int intValue() { + return value; + } + + public Integer integerValue() { + if (cachedIntegerValue == null) { + cachedIntegerValue = new Integer(intValue()); + } + return cachedIntegerValue; + } + + @Override + public boolean equals(final Object o) { + return o instanceof MutableInteger && ((MutableInteger) o).value == this.value; + } + + @Override + public int hashCode() { + return integerValue().hashCode(); + } + + public void setValue(final int value) { + this.value = value; + cachedIntegerValue = null; + } + + public void increment() { + add(1); + } + + public void add(final int amount) { + setValue(value + amount); + } + + public void decrement() { + subtract(1); + } + + public void subtract(final int amount) { + add(amount * -1); + } + + @Override + public String toString() { + return String.valueOf(value); + } +} diff --git a/src/com/google/common/geometry/R1Interval.java b/src/com/google/common/geometry/R1Interval.java new file mode 100644 index 0000000..e8edbe4 --- /dev/null +++ b/src/com/google/common/geometry/R1Interval.java @@ -0,0 +1,252 @@ +/* + * Copyright 2005 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.common.geometry; + +/** + * An R1Interval represents a closed, bounded interval on the real line. 
It is + * capable of representing the empty interval (containing no points) and + * zero-length intervals (containing a single point). + * + */ + +public strictfp class R1Interval { + private final double[] bounds = new double[2]; + + /** Interval constructor. If lo > hi, the interval is empty. */ + public R1Interval(double lo, double hi) { + bounds[0] = lo; + bounds[1] = hi; + } + + /** + * Returns an empty interval. (Any interval where lo > hi is considered + * empty.) + */ + public static R1Interval empty() { + return new R1Interval(1, 0); + } + + /** + * Convenience method to construct an interval containing a single point. + */ + public static R1Interval fromPoint(double p) { + return new R1Interval(p, p); + } + + /** + * Convenience method to construct the minimal interval containing the two + * given points. This is equivalent to starting with an empty interval and + * calling AddPoint() twice, but it is more efficient. + */ + public static R1Interval fromPointPair(double p1, double p2) { + if (p1 <= p2) { + return new R1Interval(p1, p2); + } else { + return new R1Interval(p2, p1); + } + } + + public double lo() { + return bounds[0]; + } + + public double hi() { + return bounds[1]; + } + + public double bound(int i) { + return bounds[i]; + } + + public double[] bounds() { + return bounds; + } + + public void setLo(double p) { + bounds[0] = p; + } + + public void setHi(double p) { + bounds[1] = p; + } + + /** + * Return true if the interval is empty, i.e. it contains no points. + */ + public boolean isEmpty() { + return lo() > hi(); + } + + /** + * Return the center of the interval. For empty intervals, the result is + * arbitrary. + */ + public double getCenter() { + return 0.5 * (lo() + hi()); + } + + + /** + * Return the length of the interval. The length of an empty interval is + * negative. 
+ */ + public double getLength() { + return hi() - lo(); + } + + + public boolean contains(double p) { + return p >= lo() && p <= hi(); + } + + public boolean interiorContains(double p) { + return p > lo() && p < hi(); + } + + /** Return true if this interval contains the interval 'y'. */ + public boolean contains(R1Interval y) { + if (y.isEmpty()) { + return true; + } + return y.lo() >= lo() && y.hi() <= hi(); + } + + /** + * Return true if the interior of this interval contains the entire interval + * 'y' (including its boundary). + */ + public boolean interiorContains(R1Interval y) { + if (y.isEmpty()) { + return true; + } + return y.lo() > lo() && y.hi() < hi(); + } + + /** + * Return true if this interval intersects the given interval, i.e. if they + * have any points in common. + */ + public boolean intersects(R1Interval y) { + if (lo() <= y.lo()) { + return y.lo() <= hi() && y.lo() <= y.hi(); + } else { + return lo() <= y.hi() && lo() <= hi(); + } + } + + /** + * Return true if the interior of this interval intersects any point of the + * given interval (including its boundary). + */ + public boolean interiorIntersects(R1Interval y) { + return y.lo() < hi() && lo() < y.hi() && lo() < hi() && y.lo() <= y.hi(); + } + + /** Expand the interval so that it contains the given point "p". */ + public R1Interval addPoint(double p) { + if (isEmpty()) { + return R1Interval.fromPoint(p); + } else if (p < lo()) { + return new R1Interval(p, hi()); + } else if (p > hi()) { + return new R1Interval(lo(), p); + } else { + return new R1Interval(lo(), hi()); + } + } + + /** + * Return an interval that contains all points with a distance "radius" of a + * point in this interval. Note that the expansion of an empty interval is + * always empty. 
+ */ + public R1Interval expanded(double radius) { + // assert (radius >= 0); + if (isEmpty()) { + return this; + } + return new R1Interval(lo() - radius, hi() + radius); + } + + /** + * Return the smallest interval that contains this interval and the given + * interval "y". + */ + public R1Interval union(R1Interval y) { + if (isEmpty()) { + return y; + } + if (y.isEmpty()) { + return this; + } + return new R1Interval(Math.min(lo(), y.lo()), Math.max(hi(), y.hi())); + } + + /** + * Return the intersection of this interval with the given interval. Empty + * intervals do not need to be special-cased. + */ + public R1Interval intersection(R1Interval y) { + return new R1Interval(Math.max(lo(), y.lo()), Math.min(hi(), y.hi())); + } + + @Override + public boolean equals(Object that) { + if (that instanceof R1Interval) { + R1Interval y = (R1Interval) that; + // Return true if two intervals contain the same set of points. + return (lo() == y.lo() && hi() == y.hi()) || (isEmpty() && y.isEmpty()); + + } + return false; + } + + @Override + public int hashCode() { + if (isEmpty()) { + return 17; + } + + long value = 17; + value = 37 * value + Double.doubleToLongBits(bounds[0]); + value = 37 * value + Double.doubleToLongBits(bounds[1]); + return (int) (value ^ (value >>> 32)); + } + + public boolean approxEquals(R1Interval y) { + return approxEquals(y, 1e-15); + } + + /** + * Return true if length of the symmetric difference between the two intervals + * is at most the given tolerance. 
+ * + */ + public boolean approxEquals(R1Interval y, double maxError) { + if (isEmpty()) { + return y.getLength() <= maxError; + } + if (y.isEmpty()) { + return getLength() <= maxError; + } + return Math.abs(y.lo() - lo()) + Math.abs(y.hi() - hi()) <= maxError; + } + + @Override + public String toString() { + return "[" + lo() + ", " + hi() + "]"; + } +} diff --git a/src/com/google/common/geometry/R2Vector.java b/src/com/google/common/geometry/R2Vector.java new file mode 100644 index 0000000..70ca580 --- /dev/null +++ b/src/com/google/common/geometry/R2Vector.java @@ -0,0 +1,113 @@ +/* + * Copyright 2005 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.common.geometry; + +/** + * R2Vector represents a vector in the two-dimensional space. It defines the + * basic geometrical operations for 2D vectors, e.g. cross product, addition, + * norm, comparison etc. + * + */ +public strictfp class R2Vector { + double x; + double y; + + public R2Vector(double x, double y) { + this.x = x; + this.y = y; + } + + public R2Vector(double[] coord) { + if (coord.length != 2) { + throw new IllegalStateException("Points must have exactly 2 coordinates"); + } + x = coord[0]; + y = coord[1]; + } + + public R2Vector() { + } + + public double get(int index) { + if (index > 1) { + throw new ArrayIndexOutOfBoundsException(index); + } + return index == 0 ? 
this.x : this.y; + } + + public static R2Vector add(final R2Vector p1, final R2Vector p2) { + return new R2Vector(p1.x + p2.x, p1.y + p2.y); + } + + public static R2Vector mul(final R2Vector p, double m) { + return new R2Vector(m * p.x, m * p.y); + } + + public double norm2() { + return x * x + y * y; + } + + public static double dotProd(final R2Vector p1, final R2Vector p2) { + return p1.x * p2.x + p1.y * p2.y; + } + + public double dotProd(R2Vector that) { + return dotProd(this, that); + } + + public double crossProd(final R2Vector that) { + return this.x * that.y - this.y * that.x; + } + + public boolean lessThan(R2Vector vb) { + if (x < vb.x) { + return true; + } + if (vb.x < x) { + return false; + } + if (y < vb.y) { + return true; + } + return false; + } + + @Override + public boolean equals(Object that) { + if (!(that instanceof R2Vector)) { + return false; + } + R2Vector thatPoint = (R2Vector) that; + return this.x == thatPoint.x && this.y == thatPoint.y; + } + + /** + * Calcualates hashcode based on stored coordinates. Since we want +0.0 and + * -0.0 to be treated the same, we ignore the sign of the coordinates. + */ + @Override + public int hashCode() { + long value = 17; + value += 37 * value + Double.doubleToLongBits(Math.abs(x)); + value += 37 * value + Double.doubleToLongBits(Math.abs(y)); + return (int) (value ^ (value >>> 32)); + } + + @Override + public String toString() { + return "(" + x + ", " + y + ")"; + } +} diff --git a/src/com/google/common/geometry/S1Angle.java b/src/com/google/common/geometry/S1Angle.java new file mode 100644 index 0000000..152052f --- /dev/null +++ b/src/com/google/common/geometry/S1Angle.java @@ -0,0 +1,137 @@ +/* + * Copyright 2005 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.common.geometry; + + +public strictfp class S1Angle implements Comparable<S1Angle> { + + private double radians; + + public double radians() { + return radians; + } + + public double degrees() { + return radians * (180 / Math.PI); + } + + public long e5() { + return Math.round(degrees() * 1e5); + } + + public long e6() { + return Math.round(degrees() * 1e6); + } + + public long e7() { + return Math.round(degrees() * 1e7); + } + + /** + * The default constructor yields a zero angle. + */ + public S1Angle() { + this.radians = 0; + } + + private S1Angle(double radians) { + this.radians = radians; + } + + /** + * Return the angle between two points, which is also equal to the distance + * between these points on the unit sphere. The points do not need to be + * normalized. 
+ */ + public S1Angle(S2Point x, S2Point y) { + this.radians = x.angle(y); + } + + @Override + public boolean equals(Object that) { + if (that instanceof S1Angle) { + return this.radians() == ((S1Angle) that).radians(); + } + return false; + } + + @Override + public int hashCode() { + long value = Double.doubleToLongBits(radians); + return (int) (value ^ (value >>> 32)); + } + + public boolean lessThan(S1Angle that) { + return this.radians() < that.radians(); + } + + public boolean greaterThan(S1Angle that) { + return this.radians() > that.radians(); + } + + public boolean lessOrEquals(S1Angle that) { + return this.radians() <= that.radians(); + } + + public boolean greaterOrEquals(S1Angle that) { + return this.radians() >= that.radians(); + } + + public static S1Angle max(S1Angle left, S1Angle right) { + return right.greaterThan(left) ? right : left; + } + + public static S1Angle min(S1Angle left, S1Angle right) { + return right.greaterThan(left) ? left : right; + } + + public static S1Angle radians(double radians) { + return new S1Angle(radians); + } + + public static S1Angle degrees(double degrees) { + return new S1Angle(degrees * (Math.PI / 180)); + } + + public static S1Angle e5(long e5) { + return degrees(e5 * 1e-5); + } + + public static S1Angle e6(long e6) { + // Multiplying by 1e-6 isn't quite as accurate as dividing by 1e6, + // but it's about 10 times faster and more than accurate enough. + return degrees(e6 * 1e-6); + } + + public static S1Angle e7(long e7) { + return degrees(e7 * 1e-7); + } + + /** + * Writes the angle in degrees with a "d" suffix, e.g. "17.3745d". By default + * 6 digits are printed; this can be changed using setprecision(). Up to 17 + * digits are required to distinguish one angle from another. + */ + @Override + public String toString() { + return degrees() + "d"; + } + + @Override + public int compareTo(S1Angle that) { + return this.radians < that.radians ? -1 : this.radians > that.radians ? 
1 : 0; + } +} diff --git a/src/com/google/common/geometry/S1Interval.java b/src/com/google/common/geometry/S1Interval.java new file mode 100644 index 0000000..fa6f1ad --- /dev/null +++ b/src/com/google/common/geometry/S1Interval.java @@ -0,0 +1,543 @@ +/* + * Copyright 2005 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.common.geometry; + + +/** + * An S1Interval represents a closed interval on a unit circle (also known as a + * 1-dimensional sphere). It is capable of representing the empty interval + * (containing no points), the full interval (containing all points), and + * zero-length intervals (containing a single point). + * + * Points are represented by the angle they make with the positive x-axis in + * the range [-Pi, Pi]. An interval is represented by its lower and upper bounds + * (both inclusive, since the interval is closed). The lower bound may be + * greater than the upper bound, in which case the interval is "inverted" (i.e. + * it passes through the point (-1, 0)). + * + * Note that the point (-1, 0) has two valid representations, Pi and -Pi. The + * normalized representation of this point internally is Pi, so that endpoints + * of normal intervals are in the range (-Pi, Pi]. However, we take advantage of + * the point -Pi to construct two special intervals: the Full() interval is + * [-Pi, Pi], and the Empty() interval is [Pi, -Pi]. 
+ * + */ + +public strictfp class S1Interval implements Cloneable { + + private final double[] bounds = new double[2]; + + /** + * Both endpoints must be in the range -Pi to Pi inclusive. The value -Pi is + * converted internally to Pi except for the Full() and Empty() intervals. + */ + public S1Interval(double lo, double hi) { + this(lo, hi, false); + } + + /** + * Copy constructor. Assumes that the given interval is valid. + */ + public S1Interval(S1Interval interval) { + bounds[0] = interval.bounds[0]; + bounds[1] = interval.bounds[1]; + } + + /** + * Internal constructor that assumes that both arguments are in the correct + * range, i.e. normalization from -Pi to Pi is already done. + */ + private S1Interval(double lo, double hi, boolean checked) { + bounds[0] = lo; + bounds[1] = hi; + + if (!checked) { + if (lo == -S2.M_PI && hi != S2.M_PI) { + setLo(S2.M_PI); + } + if (hi == -S2.M_PI && lo != S2.M_PI) { + setHi(S2.M_PI); + } + } + // assert (isValid()); + } + + + public static S1Interval empty() { + return new S1Interval(S2.M_PI, -S2.M_PI, true); + } + + public static S1Interval full() { + return new S1Interval(-S2.M_PI, S2.M_PI, true); + } + + /** Convenience method to construct an interval containing a single point. */ + public static S1Interval fromPoint(double p) { + if (p == -S2.M_PI) { + p = S2.M_PI; + } + return new S1Interval(p, p, true); + } + + /** + * Convenience method to construct the minimal interval containing the two + * given points. This is equivalent to starting with an empty interval and + * calling AddPoint() twice, but it is more efficient. 
+ */ + public static S1Interval fromPointPair(double p1, double p2) { + // assert (Math.abs(p1) <= S2.M_PI && Math.abs(p2) <= S2.M_PI); + if (p1 == -S2.M_PI) { + p1 = S2.M_PI; + } + if (p2 == -S2.M_PI) { + p2 = S2.M_PI; + } + if (positiveDistance(p1, p2) <= S2.M_PI) { + return new S1Interval(p1, p2, true); + } else { + return new S1Interval(p2, p1, true); + } + } + + + public double lo() { + return bounds[0]; + } + + public double hi() { + return bounds[1]; + } + + public double bound(int i) { + return bounds[i]; + } + + public double[] bounds() { + return bounds; + } + + public void setLo(double p) { + bounds[0] = p; + // assert (isValid()); + } + + public void setHi(double p) { + bounds[1] = p; + // assert (isValid()); + } + + /** + * An interval is valid if neither bound exceeds Pi in absolute value, and the + * value -Pi appears only in the Empty() and Full() intervals. + */ + public boolean isValid() { + return (Math.abs(lo()) <= S2.M_PI && Math.abs(hi()) <= S2.M_PI + && !(lo() == -S2.M_PI && hi() != S2.M_PI) && !(hi() == -S2.M_PI && lo() != S2.M_PI)); + } + + + /** Return true if the interval contains all points on the unit circle. */ + public boolean isFull() { + return hi() - lo() == 2 * S2.M_PI; + } + + + /** Return true if the interval is empty, i.e. it contains no points. */ + public boolean isEmpty() { + return lo() - hi() == 2 * S2.M_PI; + } + + + /* Return true if lo() > hi(). (This is true for empty intervals.) */ + public boolean isInverted() { + return lo() > hi(); + } + + + /** + * Return the midpoint of the interval. For full and empty intervals, the + * result is arbitrary. + */ + public double getCenter() { + double center = 0.5 * (lo() + hi()); + if (!isInverted()) { + return center; + } + // Return the center in the range (-Pi, Pi]. + return (center <= 0) ? (center + S2.M_PI) : (center - S2.M_PI); + } + + /** + * Return the length of the interval. The length of an empty interval is + * negative. 
+ */ + public double getLength() { + double length = hi() - lo(); + if (length >= 0) { + return length; + } + length += 2 * S2.M_PI; + // Empty intervals have a negative length. + return (length > 0) ? length : -1; + } + + + /** + * Return the complement of the interior of the interval. An interval and its + * complement have the same boundary but do not share any interior values. The + * complement operator is not a bijection, since the complement of a singleton + * interval (containing a single value) is the same as the complement of an + * empty interval. + */ + public S1Interval complement() { + if (lo() == hi()) { + return full(); // Singleton. + } + return new S1Interval(hi(), lo(), true); // Handles + // empty and + // full. + } + + /** Return true if the interval (which is closed) contains the point 'p'. */ + public boolean contains(double p) { + // Works for empty, full, and singleton intervals. + // assert (Math.abs(p) <= S2.M_PI); + if (p == -S2.M_PI) { + p = S2.M_PI; + } + return fastContains(p); + } + + + /** + * Return true if the interval (which is closed) contains the point 'p'. Skips + * the normalization of 'p' from -Pi to Pi. + * + */ + public boolean fastContains(double p) { + if (isInverted()) { + return (p >= lo() || p <= hi()) && !isEmpty(); + } else { + return p >= lo() && p <= hi(); + } + } + + + /** Return true if the interior of the interval contains the point 'p'. */ + public boolean interiorContains(double p) { + // Works for empty, full, and singleton intervals. + // assert (Math.abs(p) <= S2.M_PI); + if (p == -S2.M_PI) { + p = S2.M_PI; + } + + if (isInverted()) { + return p > lo() || p < hi(); + } else { + return (p > lo() && p < hi()) || isFull(); + } + } + + + /** + * Return true if the interval contains the given interval 'y'. Works for + * empty, full, and singleton intervals. 
+ */ + public boolean contains(final S1Interval y) { + // It might be helpful to compare the structure of these tests to + // the simpler Contains(double) method above. + + if (isInverted()) { + if (y.isInverted()) { + return y.lo() >= lo() && y.hi() <= hi(); + } + return (y.lo() >= lo() || y.hi() <= hi()) && !isEmpty(); + } else { + if (y.isInverted()) { + return isFull() || y.isEmpty(); + } + return y.lo() >= lo() && y.hi() <= hi(); + } + } + + /** + * Returns true if the interior of this interval contains the entire interval + * 'y'. Note that x.InteriorContains(x) is true only when x is the empty or + * full interval, and x.InteriorContains(S1Interval(p,p)) is equivalent to + * x.InteriorContains(p). + */ + public boolean interiorContains(final S1Interval y) { + if (isInverted()) { + if (!y.isInverted()) { + return y.lo() > lo() || y.hi() < hi(); + } + return (y.lo() > lo() && y.hi() < hi()) || y.isEmpty(); + } else { + if (y.isInverted()) { + return isFull() || y.isEmpty(); + } + return (y.lo() > lo() && y.hi() < hi()) || isFull(); + } + } + + /** + * Return true if the two intervals contain any points in common. Note that + * the point +/-Pi has two representations, so the intervals [-Pi,-3] and + * [2,Pi] intersect, for example. + */ + public boolean intersects(final S1Interval y) { + if (isEmpty() || y.isEmpty()) { + return false; + } + if (isInverted()) { + // Every non-empty inverted interval contains Pi. + return y.isInverted() || y.lo() <= hi() || y.hi() >= lo(); + } else { + if (y.isInverted()) { + return y.lo() <= hi() || y.hi() >= lo(); + } + return y.lo() <= hi() && y.hi() >= lo(); + } + } + + /** + * Return true if the interior of this interval contains any point of the + * interval 'y' (including its boundary). Works for empty, full, and singleton + * intervals. 
+ */ + public boolean interiorIntersects(final S1Interval y) { + if (isEmpty() || y.isEmpty() || lo() == hi()) { + return false; + } + if (isInverted()) { + return y.isInverted() || y.lo() < hi() || y.hi() > lo(); + } else { + if (y.isInverted()) { + return y.lo() < hi() || y.hi() > lo(); + } + return (y.lo() < hi() && y.hi() > lo()) || isFull(); + } + } + + + /** + * Expand the interval by the minimum amount necessary so that it contains the + * given point "p" (an angle in the range [-Pi, Pi]). + */ + public S1Interval addPoint(double p) { + // assert (Math.abs(p) <= S2.M_PI); + if (p == -S2.M_PI) { + p = S2.M_PI; + } + + if (fastContains(p)) { + return new S1Interval(this); + } + + if (isEmpty()) { + return S1Interval.fromPoint(p); + } else { + // Compute distance from p to each endpoint. + double dlo = positiveDistance(p, lo()); + double dhi = positiveDistance(hi(), p); + if (dlo < dhi) { + return new S1Interval(p, hi()); + } else { + return new S1Interval(lo(), p); + } + // Adding a point can never turn a non-full interval into a full one. + } + } + + /** + * Return an interval that contains all points with a distance "radius" of a + * point in this interval. Note that the expansion of an empty interval is + * always empty. The radius must be non-negative. + */ + public S1Interval expanded(double radius) { + // assert (radius >= 0); + if (isEmpty()) { + return this; + } + + // Check whether this interval will be full after expansion, allowing + // for a 1-bit rounding error when computing each endpoint. + if (getLength() + 2 * radius >= 2 * S2.M_PI - 1e-15) { + return full(); + } + + S1Interval result = + new S1Interval(Math.IEEEremainder(lo() - radius, 2 * S2.M_PI), + Math.IEEEremainder(hi() + radius, 2 * S2.M_PI)); + if (result.lo() == -S2.M_PI) { + result.setLo(S2.M_PI); + } + return result; + } + + + /** + * Return the smallest interval that contains this interval and the given + * interval "y". 
+ */ + public S1Interval union(final S1Interval y) { + // The y.is_full() case is handled correctly in all cases by the code + // below, but can follow three separate code paths depending on whether + // this interval is inverted, is non-inverted but contains Pi, or neither. + + if (y.isEmpty()) { + return this; + } + if (fastContains(y.lo())) { + if (fastContains(y.hi())) { + // Either this interval contains y, or the union of the two + // intervals is the Full() interval. + if (contains(y)) { + return this; // is_full() code path + } + return full(); + } + return new S1Interval(lo(), y.hi(), true); + } + if (fastContains(y.hi())) { + return new S1Interval(y.lo(), hi(), true); + } + + // This interval contains neither endpoint of y. This means that either y + // contains all of this interval, or the two intervals are disjoint. + if (isEmpty() || y.fastContains(lo())) { + return y; + } + + // Check which pair of endpoints are closer together. + double dlo = positiveDistance(y.hi(), lo()); + double dhi = positiveDistance(hi(), y.lo()); + if (dlo < dhi) { + return new S1Interval(y.lo(), hi(), true); + } else { + return new S1Interval(lo(), y.hi(), true); + } + } + + + /** + * Return the smallest interval that contains the intersection of this + * interval with "y". Note that the region of intersection may consist of two + * disjoint intervals. + */ + public S1Interval intersection(final S1Interval y) { + // The y.is_full() case is handled correctly in all cases by the code + // below, but can follow three separate code paths depending on whether + // this interval is inverted, is non-inverted but contains Pi, or neither. + + if (y.isEmpty()) { + return empty(); + } + if (fastContains(y.lo())) { + if (fastContains(y.hi())) { + // Either this interval contains y, or the region of intersection + // consists of two disjoint subintervals. In either case, we want + // to return the shorter of the two original intervals. 
+ if (y.getLength() < getLength()) { + return y; // is_full() code path + } + return this; + } + return new S1Interval(y.lo(), hi(), true); + } + if (fastContains(y.hi())) { + return new S1Interval(lo(), y.hi(), true); + } + + // This interval contains neither endpoint of y. This means that either y + // contains all of this interval, or the two intervals are disjoint. + + if (y.fastContains(lo())) { + return this; // is_empty() okay here + } + // assert (!intersects(y)); + return empty(); + } + + + /** + * Return true if the length of the symmetric difference between the two + * intervals is at most the given tolerance. + */ + public boolean approxEquals(final S1Interval y, double maxError) { + if (isEmpty()) { + return y.getLength() <= maxError; + } + if (y.isEmpty()) { + return getLength() <= maxError; + } + return (Math.abs(Math.IEEEremainder(y.lo() - lo(), 2 * S2.M_PI)) + + Math.abs(Math.IEEEremainder(y.hi() - hi(), 2 * S2.M_PI))) <= maxError; + } + + public boolean approxEquals(final S1Interval y) { + return approxEquals(y, 1e-9); + } + + /** + * Return true if two intervals contains the same set of points. + */ + @Override + public boolean equals(Object that) { + if (that instanceof S1Interval) { + S1Interval thatInterval = (S1Interval) that; + return lo() == thatInterval.lo() && hi() == thatInterval.hi(); + } + return false; + } + + @Override + public int hashCode() { + long value = 17; + value = 37 * value + Double.doubleToLongBits(lo()); + value = 37 * value + Double.doubleToLongBits(hi()); + return (int) ((value >>> 32) ^ value); + } + + @Override + public String toString() { + return "[" + this.lo() + ", " + this.hi() + "]"; + } + + /** + * Compute the distance from "a" to "b" in the range [0, 2*Pi). This is + * equivalent to (drem(b - a - S2.M_PI, 2 * S2.M_PI) + S2.M_PI), except that + * it is more numerically stable (it does not lose precision for very small + * positive distances). 
 + */ + public static double positiveDistance(double a, double b) { + double d = b - a; + if (d >= 0) { + return d; + } + // We want to ensure that if b == Pi and a == (-Pi + eps), + // the return result is approximately 2*Pi and not zero. + return (b + S2.M_PI) - (a - S2.M_PI); + } + + @Override + public Object clone() throws CloneNotSupportedException { + S1Interval clone = (S1Interval) super.clone(); + clone.setLo(lo()); + clone.setHi(hi()); + return clone; + } +} diff --git a/src/com/google/common/geometry/S2.java b/src/com/google/common/geometry/S2.java new file mode 100644 index 0000000..80a994e --- /dev/null +++ b/src/com/google/common/geometry/S2.java @@ -0,0 +1,716 @@ +/* + * Copyright 2005 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.common.geometry; + +import com.google.common.base.Preconditions; + +public strictfp class S2 { + // declare some frequently used constants + public static final double M_PI = Math.PI; + public static final double M_1_PI = 1.0 / Math.PI; + public static final double M_PI_2 = Math.PI / 2.0; + public static final double M_PI_4 = Math.PI / 4.0; + public static final double M_SQRT2 = Math.sqrt(2); + public static final double M_E = Math.E; + + // Together these flags define a cell orientation. If SWAP_MASK + // is true, then canonical traversal order is flipped around the + // diagonal (i.e. i and j are swapped with each other). 
If + // INVERT_MASK is true, then the traversal order is rotated by 180 + // degrees (i.e. the bits of i and j are inverted, or equivalently, + // the axis directions are reversed). + public static final int SWAP_MASK = 0x01; + public static final int INVERT_MASK = 0x02; + + + /** + * kPosToOrientation[pos] -> orientation_modifier + * + * Return a modifier indicating how the orientation of the child subcell with + * the given traversal position [0..3] is related to the orientation of the + * parent cell. The modifier should be XOR-ed with the parent orientation to + * obtain the curve orientation in the child. + * + */ + static final int[] POS_TO_ORIENTATION = {SWAP_MASK, 0, 0, INVERT_MASK + SWAP_MASK}; + + /** + * + * kPosToIJ[orientation][pos] -> ij // // Return the (i,j) index of the + * subcell at the given position 'pos' in the // Hilbert curve traversal order + * with the given orientation. This is the // inverse of the previous table: + * kPosToIJ[r][kIJtoPos[r][ij]] == ij + */ + public static final int[][] POS_TO_IJ = { + // 0 1 2 3 + {0, 1, 3, 2}, // canonical order: (0,0), (0,1), (1,1), (1,0) + {0, 2, 3, 1}, // axes swapped: (0,0), (1,0), (1,1), (0,1) + {3, 2, 0, 1}, // bits inverted: (1,1), (1,0), (0,0), (0,1) + {3, 1, 0, 2}, // swapped & inverted: (1,1), (0,1), (0,0), (1,0) + }; + + /** + * Defines an area or a length cell metric. + */ + public static class Metric { + + private final double deriv; + private final int dim; + + /** + * Defines a cell metric of the given dimension (1 == length, 2 == area). + */ + public Metric(int dim, double deriv) { + this.deriv = deriv; + this.dim = dim; + } + + /** + * The "deriv" value of a metric is a derivative, and must be multiplied by + * a length or area in (s,t)-space to get a useful value. + */ + public double deriv() { + return deriv; + } + + /** Return the value of a metric for cells at the given level. 
*/ + public double getValue(int level) { + return StrictMath.scalb(deriv, dim * (1 - level)); + } + + /** + * Return the level at which the metric has approximately the given value. + * For example, S2::kAvgEdge.GetClosestLevel(0.1) returns the level at which + * the average cell edge length is approximately 0.1. The return value is + * always a valid level. + */ + public int getClosestLevel(double value) { + return getMinLevel(M_SQRT2 * value); + } + + /** + * Return the minimum level such that the metric is at most the given value, + * or S2CellId::kMaxLevel if there is no such level. For example, + * S2::kMaxDiag.GetMinLevel(0.1) returns the minimum level such that all + * cell diagonal lengths are 0.1 or smaller. The return value is always a + * valid level. + */ + public int getMinLevel(double value) { + if (value <= 0) { + return S2CellId.MAX_LEVEL; + } + + // This code is equivalent to computing a floating-point "level" + // value and rounding up. frexp() returns a fraction in the + // range [0.5,1) and the corresponding exponent. + int level = DoubleMath.frexp(value / ((1 << dim) * deriv)).exp; + level = Math.max(0, Math.min(S2CellId.MAX_LEVEL, -((level - 1) >> (dim - 1)))); + // assert (level == S2CellId.MAX_LEVEL || getValue(level) <= value); + // assert (level == 0 || getValue(level - 1) > value); + return level; + } + + /** + * Return the maximum level such that the metric is at least the given + * value, or zero if there is no such level. For example, + * S2.kMinWidth.GetMaxLevel(0.1) returns the maximum level such that all + * cells have a minimum width of 0.1 or larger. The return value is always a + * valid level. + */ + public int getMaxLevel(double value) { + if (value <= 0) { + return S2CellId.MAX_LEVEL; + } + + // This code is equivalent to computing a floating-point "level" + // value and rounding down. 
+ int level = DoubleMath.frexp((1 << dim) * deriv / value).exp; + level = Math.max(0, Math.min(S2CellId.MAX_LEVEL, (level - 1) >> (dim - 1))); + // assert (level == 0 || getValue(level) >= value); + // assert (level == S2CellId.MAX_LEVEL || getValue(level + 1) < value); + return level; + } + + } + + /** + * Return a unique "origin" on the sphere for operations that need a fixed + * reference point. It should *not* be a point that is commonly used in edge + * tests in order to avoid triggering code to handle degenerate cases. (This + * rules out the north and south poles.) + */ + public static S2Point origin() { + return new S2Point(0, 1, 0); + } + + /** + * Return true if the given point is approximately unit length (this is mainly + * useful for assertions). + */ + public static boolean isUnitLength(S2Point p) { + return Math.abs(p.norm2() - 1) <= 1e-15; + } + + /** + * Return true if edge AB crosses CD at a point that is interior to both + * edges. Properties: + * + * (1) SimpleCrossing(b,a,c,d) == SimpleCrossing(a,b,c,d) (2) + * SimpleCrossing(c,d,a,b) == SimpleCrossing(a,b,c,d) + */ + public static boolean simpleCrossing(S2Point a, S2Point b, S2Point c, S2Point d) { + // We compute SimpleCCW() for triangles ACB, CBD, BDA, and DAC. All + // of these triangles need to have the same orientation (CW or CCW) + // for an intersection to exist. Note that this is slightly more + // restrictive than the corresponding definition for planar edges, + // since we need to exclude pairs of line segments that would + // otherwise "intersect" by crossing two antipodal points. + + S2Point ab = S2Point.crossProd(a, b); + S2Point cd = S2Point.crossProd(c, d); + double acb = -ab.dotProd(c); + double cbd = -cd.dotProd(b); + double bda = ab.dotProd(d); + double dac = cd.dotProd(a); + + return (acb * cbd > 0) && (cbd * bda > 0) && (bda * dac > 0); + } + + /** + * Return a vector "c" that is orthogonal to the given unit-length vectors "a" + * and "b". 
This function is similar to a.CrossProd(b) except that it does a + * better job of ensuring orthogonality when "a" is nearly parallel to "b", + * and it returns a non-zero result even when a == b or a == -b. + * + * It satisfies the following properties (RCP == RobustCrossProd): + * + * (1) RCP(a,b) != 0 for all a, b (2) RCP(b,a) == -RCP(a,b) unless a == b or + * a == -b (3) RCP(-a,b) == -RCP(a,b) unless a == b or a == -b (4) RCP(a,-b) + * == -RCP(a,b) unless a == b or a == -b + */ + public static S2Point robustCrossProd(S2Point a, S2Point b) { + // The direction of a.CrossProd(b) becomes unstable as (a + b) or (a - b) + // approaches zero. This leads to situations where a.CrossProd(b) is not + // very orthogonal to "a" and/or "b". We could fix this using Gram-Schmidt, + // but we also want b.RobustCrossProd(a) == -a.RobustCrossProd(b). + // + // The easiest fix is to just compute the cross product of (b+a) and (b-a). + // Given that "a" and "b" are unit-length, this has good orthogonality to + // "a" and "b" even if they differ only in the lowest bit of one component. + + // assert (isUnitLength(a) && isUnitLength(b)); + S2Point x = S2Point.crossProd(S2Point.add(b, a), S2Point.sub(b, a)); + if (!x.equals(new S2Point(0, 0, 0))) { + return x; + } + + // The only result that makes sense mathematically is to return zero, but + // we find it more convenient to return an arbitrary orthogonal vector. + return ortho(a); + } + + /** + * Return a unit-length vector that is orthogonal to "a". Satisfies Ortho(-a) + * = -Ortho(a) for all a. + */ + public static S2Point ortho(S2Point a) { + // The current implementation in S2Point has the property we need, + // i.e. Ortho(-a) = -Ortho(a) for all a. + return a.ortho(); + } + + /** + * Return the area of triangle ABC. The method used is about twice as + * expensive as Girard's formula, but it is numerically stable for both large + * and very small triangles. The points do not need to be normalized. 
The area + * is always positive. + * + * The triangle area is undefined if it contains two antipodal points, and + * becomes numerically unstable as the length of any edge approaches 180 + * degrees. + */ + static double area(S2Point a, S2Point b, S2Point c) { + // This method is based on l'Huilier's theorem, + // + // tan(E/4) = sqrt(tan(s/2) tan((s-a)/2) tan((s-b)/2) tan((s-c)/2)) + // + // where E is the spherical excess of the triangle (i.e. its area), + // a, b, c, are the side lengths, and + // s is the semiperimeter (a + b + c) / 2 . + // + // The only significant source of error using l'Huilier's method is the + // cancellation error of the terms (s-a), (s-b), (s-c). This leads to a + // *relative* error of about 1e-16 * s / min(s-a, s-b, s-c). This compares + // to a relative error of about 1e-15 / E using Girard's formula, where E is + // the true area of the triangle. Girard's formula can be even worse than + // this for very small triangles, e.g. a triangle with a true area of 1e-30 + // might evaluate to 1e-5. + // + // So, we prefer l'Huilier's formula unless dmin < s * (0.1 * E), where + // dmin = min(s-a, s-b, s-c). This basically includes all triangles + // except for extremely long and skinny ones. + // + // Since we don't know E, we would like a conservative upper bound on + // the triangle area in terms of s and dmin. It's possible to show that + // E <= k1 * s * sqrt(s * dmin), where k1 = 2*sqrt(3)/Pi (about 1). + // Using this, it's easy to show that we should always use l'Huilier's + // method if dmin >= k2 * s^5, where k2 is about 1e-2. Furthermore, + // if dmin < k2 * s^5, the triangle area is at most k3 * s^4, where + // k3 is about 0.1. Since the best case error using Girard's formula + // is about 1e-15, this means that we shouldn't even consider it unless + // s >= 3e-4 or so. + + // We use volatile doubles to force the compiler to truncate all of these + // quantities to 64 bits. 
Otherwise it may compute a value of dmin > 0 + // simply because it chose to spill one of the intermediate values to + // memory but not one of the others. + final double sa = b.angle(c); + final double sb = c.angle(a); + final double sc = a.angle(b); + final double s = 0.5 * (sa + sb + sc); + if (s >= 3e-4) { + // Consider whether Girard's formula might be more accurate. + double s2 = s * s; + double dmin = s - Math.max(sa, Math.max(sb, sc)); + if (dmin < 1e-2 * s * s2 * s2) { + // This triangle is skinny enough to consider Girard's formula. + double area = girardArea(a, b, c); + if (dmin < s * (0.1 * area)) { + return area; + } + } + } + // Use l'Huilier's formula. + return 4 + * Math.atan( + Math.sqrt( + Math.max(0.0, + Math.tan(0.5 * s) * Math.tan(0.5 * (s - sa)) * Math.tan(0.5 * (s - sb)) + * Math.tan(0.5 * (s - sc))))); + } + + /** + * Return the area of the triangle computed using Girard's formula. This is + * slightly faster than the Area() method above but is not accurate for very small + * triangles. + */ + public static double girardArea(S2Point a, S2Point b, S2Point c) { + // This is equivalent to the usual Girard's formula but is slightly + // more accurate, faster to compute, and handles a == b == c without + // a special case. + + S2Point ab = S2Point.crossProd(a, b); + S2Point bc = S2Point.crossProd(b, c); + S2Point ac = S2Point.crossProd(a, c); + return Math.max(0.0, ab.angle(ac) - ab.angle(bc) + bc.angle(ac)); + } + + /** + * Like Area(), but returns a positive value for counterclockwise triangles + * and a negative value otherwise. + */ + public static double signedArea(S2Point a, S2Point b, S2Point c) { + return area(a, b, c) * robustCCW(a, b, c); + } + + // About centroids: + // ---------------- + // + // There are several notions of the "centroid" of a triangle. First, there + // is the planar centroid, which is simply the centroid of the ordinary + // (non-spherical) triangle defined by the three vertices. 
// Second, there is
// the surface centroid, which is defined as the intersection of the three
// medians of the spherical triangle. It is possible to show that this
// point is simply the planar centroid projected to the surface of the
// sphere. Finally, there is the true centroid (mass centroid), which is
// defined as the area integral over the spherical triangle of (x,y,z)
// divided by the triangle area. This is the point that the triangle would
// rotate around if it was spinning in empty space.
//
// The best centroid for most purposes is the true centroid. Unlike the
// planar and surface centroids, the true centroid behaves linearly as
// regions are added or subtracted. That is, if you split a triangle into
// pieces and compute the average of their centroids (weighted by triangle
// area), the result equals the centroid of the original triangle. This is
// not true of the other centroids.
//
// Also note that the surface centroid may be nowhere near the intuitive
// "center" of a spherical triangle. For example, consider the triangle
// with vertices A=(1,eps,0), B=(0,0,1), C=(-1,eps,0) (a quarter-sphere).
// The surface centroid of this triangle is at S=(0, 2*eps, 1), which is
// within a distance of 2*eps of the vertex B. Note that the median from A
// (the segment connecting A to the midpoint of BC) passes through S, since
// this is the shortest path connecting the two endpoints. On the other
// hand, the true centroid is at M=(0, 0.5, 0.5), which when projected onto
// the surface is a much more reasonable interpretation of the "center" of
// this triangle.

  /**
   * Return the centroid of the planar triangle ABC. This can be normalized to
   * unit length to obtain the "surface centroid" of the corresponding spherical
   * triangle, i.e. the intersection of the three medians. However, note that
   * for large spherical triangles the surface centroid may be nowhere near the
   * intuitive "center" (see example above).
   */
  public static S2Point planarCentroid(S2Point a, S2Point b, S2Point c) {
    return new S2Point((a.x + b.x + c.x) / 3.0, (a.y + b.y + c.y) / 3.0, (a.z + b.z + c.z) / 3.0);
  }

  /**
   * Returns the true centroid of the spherical triangle ABC multiplied by the
   * signed area of spherical triangle ABC. The reasons for multiplying by the
   * signed area are (1) this is the quantity that needs to be summed to compute
   * the centroid of a union or difference of triangles, and (2) it's actually
   * easier to calculate this way.
   */
  public static S2Point trueCentroid(S2Point a, S2Point b, S2Point c) {
    // I couldn't find any references for computing the true centroid of a
    // spherical triangle... I have a truly marvellous demonstration of this
    // formula which this margin is too narrow to contain :)

    // assert (isUnitLength(a) && isUnitLength(b) && isUnitLength(c));
    double sina = S2Point.crossProd(b, c).norm();
    double sinb = S2Point.crossProd(c, a).norm();
    double sinc = S2Point.crossProd(a, b).norm();
    // Guard each ratio against division by zero when two vertices coincide
    // (the cross product, and hence the sine, is then exactly zero).
    double ra = (sina == 0) ? 1 : (Math.asin(sina) / sina);
    double rb = (sinb == 0) ? 1 : (Math.asin(sinb) / sinb);
    double rc = (sinc == 0) ? 1 : (Math.asin(sinc) / sinc);

    // Now compute a point M such that M.X = rX * det(ABC) / 2 for X in A,B,C.
    S2Point x = new S2Point(a.x, b.x, c.x);
    S2Point y = new S2Point(a.y, b.y, c.y);
    S2Point z = new S2Point(a.z, b.z, c.z);
    S2Point r = new S2Point(ra, rb, rc);
    return new S2Point(0.5 * S2Point.crossProd(y, z).dotProd(r),
        0.5 * S2Point.crossProd(z, x).dotProd(r), 0.5 * S2Point.crossProd(x, y).dotProd(r));
  }

  /**
   * Return true if the points A, B, C are strictly counterclockwise. Return
   * false if the points are clockwise or collinear (i.e. if they are all
   * contained on some great circle).
   *
   * Due to numerical errors, situations may arise that are mathematically
   * impossible, e.g. ABC may be considered strictly CCW while BCA is not.
   * However, the implementation guarantees the following:
   *
   * If SimpleCCW(a,b,c), then !SimpleCCW(c,b,a) for all a,b,c.
   *
   * In other words, ABC and CBA are guaranteed not to be both CCW.
   */
  public static boolean simpleCCW(S2Point a, S2Point b, S2Point c) {
    // We compute the signed volume of the parallelepiped ABC. The usual
    // formula for this is (AxB).C, but we compute it here using (CxA).B
    // in order to ensure that ABC and CBA are not both CCW. This follows
    // from the following identities (which are true numerically, not just
    // mathematically):
    //
    // (1) x.CrossProd(y) == -(y.CrossProd(x))
    // (2) (-x).DotProd(y) == -(x.DotProd(y))

    return S2Point.crossProd(c, a).dotProd(b) > 0;
  }

  /**
   * WARNING! This requires arbitrary precision arithmetic to be truly robust.
   * This means that for nearly collinear AB and AC, this function may return
   * the wrong answer.
   *
   * <p>
   * Like SimpleCCW(), but returns +1 if the points are counterclockwise and -1
   * if the points are clockwise. It satisfies the following conditions:
   *
   * (1) RobustCCW(a,b,c) == 0 if and only if a == b, b == c, or c == a
   * (2) RobustCCW(b,c,a) == RobustCCW(a,b,c) for all a,b,c
   * (3) RobustCCW(c,b,a) == -RobustCCW(a,b,c) for all a,b,c
   *
   * In other words:
   *
   * (1) The result is zero if and only if two points are the same.
   * (2) Rotating the order of the arguments does not affect the result.
   * (3) Exchanging any two arguments inverts the result.
   *
   * This function is essentially like taking the sign of the determinant of
   * a,b,c, except that it has additional logic to make sure that the above
   * properties hold even when the three points are coplanar, and to deal with
   * the limitations of floating-point arithmetic.
   */
  public static int robustCCW(S2Point a, S2Point b, S2Point c) {
    return robustCCW(a, b, c, S2Point.crossProd(a, b));
  }

  /**
   * A more efficient version of RobustCCW that allows the precomputed
   * cross-product of A and B to be specified.
   */
  public static int robustCCW(S2Point a, S2Point b, S2Point c, S2Point aCrossB) {
    Preconditions.checkArgument(isUnitLength(a));
    Preconditions.checkArgument(isUnitLength(b));
    Preconditions.checkArgument(isUnitLength(c));

    // There are 14 multiplications and additions to compute the determinant
    // below. Since all three points are normalized, it is possible to show
    // that the average rounding error per operation does not exceed 2**-54,
    // the maximum rounding error for an operation whose result magnitude is in
    // the range [0.5,1). Therefore, if the absolute value of the determinant
    // is greater than 2*14*(2**-54), the determinant will have the same sign
    // even if the arguments are rotated (which produces a mathematically
    // equivalent result but with potentially different rounding errors).
    final double kMinAbsValue = 1.6e-15; // 2 * 14 * 2**-54

    double det = aCrossB.dotProd(c);

    // Double-check borderline cases in debug mode.
    // assert ((Math.abs(det) < kMinAbsValue) || (Math.abs(det) > 1000 * kMinAbsValue)
    // || (det * expensiveCCW(a, b, c) > 0));

    if (det > kMinAbsValue) {
      return 1;
    }

    if (det < -kMinAbsValue) {
      return -1;
    }

    // Sign is uncertain at this precision; fall back to the expensive test.
    return expensiveCCW(a, b, c);
  }

  /**
   * A relatively expensive calculation invoked by RobustCCW() if the sign of
   * the determinant is uncertain.
   */
  private static int expensiveCCW(S2Point a, S2Point b, S2Point c) {
    // Return zero if and only if two points are the same. This ensures (1).
    if (a.equals(b) || b.equals(c) || c.equals(a)) {
      return 0;
    }

    // Now compute the determinant in a stable way.
// Since all three points are
    // unit length and we know that the determinant is very close to zero, this
    // means that points are very nearly collinear. Furthermore, the most common
    // situation is where two points are nearly identical or nearly antipodal.
    // To get the best accuracy in this situation, it is important to
    // immediately reduce the magnitude of the arguments by computing either
    // A+B or A-B for each pair of points. Note that even if A and B differ
    // only in their low bits, A-B can be computed very accurately. On the
    // other hand we can't accurately represent an arbitrary linear combination
    // of two vectors as would be required for Gaussian elimination. The code
    // below chooses the vertex opposite the longest edge as the "origin" for
    // the calculation, and computes the different vectors to the other two
    // vertices. This minimizes the sum of the lengths of these vectors.
    //
    // This implementation is very stable numerically, but it still does not
    // return consistent results in all cases. For example, if three points are
    // spaced far apart from each other along a great circle, the sign of the
    // result will basically be random (although it will still satisfy the
    // conditions documented in the header file). The only way to return
    // consistent results in all cases is to compute the result using
    // arbitrary-precision arithmetic. I considered using the Gnu MP library,
    // but this would be very expensive (up to 2000 bits of precision may be
    // needed to store the intermediate results) and seems like overkill for
    // this problem. The MP library is apparently also quite particular about
    // compilers and compilation options and would be a pain to maintain.

    // We want to handle the case of nearby points and nearly antipodal points
    // accurately, so determine whether A+B or A-B is smaller in each case.
    double sab = (a.dotProd(b) > 0) ? -1 : 1;
    double sbc = (b.dotProd(c) > 0) ? -1 : 1;
    double sca = (c.dotProd(a) > 0) ? -1 : 1;
    S2Point vab = S2Point.add(a, S2Point.mul(b, sab));
    S2Point vbc = S2Point.add(b, S2Point.mul(c, sbc));
    S2Point vca = S2Point.add(c, S2Point.mul(a, sca));
    double dab = vab.norm2();
    double dbc = vbc.norm2();
    double dca = vca.norm2();

    // Sort the difference vectors to find the longest edge, and use the
    // opposite vertex as the origin. If two difference vectors are the same
    // length, we break ties deterministically to ensure that the symmetry
    // properties guaranteed in the header file will be true.
    double sign;
    if (dca < dbc || (dca == dbc && a.lessThan(b))) {
      if (dab < dbc || (dab == dbc && a.lessThan(c))) {
        // The "sab" factor converts A +/- B into B +/- A.
        sign = S2Point.crossProd(vab, vca).dotProd(a) * sab; // BC is longest edge
      } else {
        sign = S2Point.crossProd(vca, vbc).dotProd(c) * sca; // AB is longest edge
      }
    } else {
      if (dab < dca || (dab == dca && b.lessThan(c))) {
        sign = S2Point.crossProd(vbc, vab).dotProd(b) * sbc; // CA is longest edge
      } else {
        sign = S2Point.crossProd(vca, vbc).dotProd(c) * sca; // AB is longest edge
      }
    }
    if (sign > 0) {
      return 1;
    }
    if (sign < 0) {
      return -1;
    }

    // The points A, B, and C are numerically indistinguishable from coplanar.
    // This may be due to roundoff error, or the points may in fact be exactly
    // coplanar. We handle this situation by perturbing all of the points by a
    // vector (eps, eps**2, eps**3) where "eps" is an infinitesimally small
    // positive number (e.g. 1 divided by a googolplex). The perturbation is
    // done symbolically, i.e. we compute what would happen if the points were
    // perturbed by this amount. It turns out that this is equivalent to
    // checking whether the points are ordered CCW around the origin first in
    // the Y-Z plane, then in the Z-X plane, and then in the X-Y plane.

    int ccw =
        planarOrderedCCW(new R2Vector(a.y, a.z), new R2Vector(b.y, b.z), new R2Vector(c.y, c.z));
    if (ccw == 0) {
      ccw =
          planarOrderedCCW(new R2Vector(a.z, a.x), new R2Vector(b.z, b.x), new R2Vector(c.z, c.x));
      if (ccw == 0) {
        ccw = planarOrderedCCW(
            new R2Vector(a.x, a.y), new R2Vector(b.x, b.y), new R2Vector(c.x, c.y));
        // assert (ccw != 0);
      }
    }
    return ccw;
  }

  /**
   * Return +1 if the planar edge AB is strictly counterclockwise around the
   * origin, -1 if it is clockwise, and 0 if it is degenerate. Uses the same
   * nearby/antipodal reduction trick as expensiveCCW() for stability.
   */
  public static int planarCCW(R2Vector a, R2Vector b) {
    // Return +1 if the edge AB is CCW around the origin, etc.
    double sab = (a.dotProd(b) > 0) ? -1 : 1;
    R2Vector vab = R2Vector.add(a, R2Vector.mul(b, sab));
    double da = a.norm2();
    double db = b.norm2();
    double sign;
    if (da < db || (da == db && a.lessThan(b))) {
      sign = a.crossProd(vab) * sab;
    } else {
      sign = vab.crossProd(b);
    }
    if (sign > 0) {
      return 1;
    }
    if (sign < 0) {
      return -1;
    }
    return 0;
  }

  /**
   * Return +1 if the planar points A, B, C are ordered counterclockwise
   * around the origin, -1 if clockwise, 0 otherwise. Decided by majority
   * vote of the three pairwise planarCCW() tests.
   */
  public static int planarOrderedCCW(R2Vector a, R2Vector b, R2Vector c) {
    int sum = 0;
    sum += planarCCW(a, b);
    sum += planarCCW(b, c);
    sum += planarCCW(c, a);
    if (sum > 0) {
      return 1;
    }
    if (sum < 0) {
      return -1;
    }
    return 0;
  }

  /**
   * Return true if the edges OA, OB, and OC are encountered in that order while
   * sweeping CCW around the point O. You can think of this as testing whether
   * A <= B <= C with respect to a continuous CCW ordering around O.
   *
   * Properties:
   * <ol>
   * <li>If orderedCCW(a,b,c,o) && orderedCCW(b,a,c,o), then a == b</li>
   * <li>If orderedCCW(a,b,c,o) && orderedCCW(a,c,b,o), then b == c</li>
   * <li>If orderedCCW(a,b,c,o) && orderedCCW(c,b,a,o), then a == b == c</li>
   * <li>If a == b or b == c, then orderedCCW(a,b,c,o) is true</li>
   * <li>Otherwise if a == c, then orderedCCW(a,b,c,o) is false</li>
   * </ol>
   */
  public static boolean orderedCCW(S2Point a, S2Point b, S2Point c, S2Point o) {
    // The last inequality below is ">" rather than ">=" so that we return true
    // if A == B or B == C, and otherwise false if A == C. Recall that
    // RobustCCW(x,y,z) == -RobustCCW(z,y,x) for all x,y,z.

    int sum = 0;
    if (robustCCW(b, o, a) >= 0) {
      ++sum;
    }
    if (robustCCW(c, o, b) >= 0) {
      ++sum;
    }
    if (robustCCW(a, o, c) > 0) {
      ++sum;
    }
    return sum >= 2;
  }

  /**
   * Return the angle at the vertex B in the triangle ABC. The return value is
   * always in the range [0, Pi]. The points do not need to be normalized.
   * Ensures that Angle(a,b,c) == Angle(c,b,a) for all a,b,c.
   *
   * The angle is undefined if A or C is diametrically opposite from B, and
   * becomes numerically unstable as the length of edge AB or BC approaches 180
   * degrees.
   */
  public static double angle(S2Point a, S2Point b, S2Point c) {
    return S2Point.crossProd(a, b).angle(S2Point.crossProd(c, b));
  }

  /**
   * Return the exterior angle at the vertex B in the triangle ABC. The return
   * value is positive if ABC is counterclockwise and negative otherwise. If you
   * imagine an ant walking from A to B to C, this is the angle that the ant
   * turns at vertex B (positive = left, negative = right). Ensures that
   * TurnAngle(a,b,c) == -TurnAngle(c,b,a) for all a,b,c.
   *
   * @param a
   * @param b
   * @param c
   * @return the exterior angle at the vertex B in the triangle ABC
   */
  public static double turnAngle(S2Point a, S2Point b, S2Point c) {
    // This is a bit less efficient because we compute all 3 cross products, but
    // it ensures that turnAngle(a,b,c) == -turnAngle(c,b,a) for all a,b,c.
    double outAngle = S2Point.crossProd(b, a).angle(S2Point.crossProd(c, b));
    return (robustCCW(a, b, c) > 0) ? outAngle : -outAngle;
  }

  /**
   * Return true if two points are within the given distance of each other
   * (mainly useful for testing).
 */
  public static boolean approxEquals(S2Point a, S2Point b, double maxError) {
    return a.angle(b) <= maxError;
  }

  public static boolean approxEquals(S2Point a, S2Point b) {
    return approxEquals(a, b, 1e-15);
  }

  public static boolean approxEquals(double a, double b, double maxError) {
    return Math.abs(a - b) <= maxError;
  }

  public static boolean approxEquals(double a, double b) {
    return approxEquals(a, b, 1e-15);
  }

  // Don't instantiate
  private S2() {
  }
}
diff --git a/src/com/google/common/geometry/S2AreaCentroid.java b/src/com/google/common/geometry/S2AreaCentroid.java
new file mode 100644
index 0000000..3b9e0b5
--- /dev/null
+++ b/src/com/google/common/geometry/S2AreaCentroid.java
@@ -0,0 +1,47 @@
/*
 * Copyright 2011 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.common.geometry;

import javax.annotation.Nullable;

/**
 * The area of an interior, i.e. the region on the left side of an odd
 * number of loops, and optionally a centroid.
 * The area is between 0 and 4*Pi. If it has a centroid, it is
 * the true centroid of the interior multiplied by the area of the shape.
 * Note that the centroid may not be contained by the shape.
 *
 * Here are some useful relationships between the cap height (h), the cap
 * opening angle (theta), the maximum chord length from the cap's center (d),
 * and the radius of cap's base (a). All formulas assume a unit radius.
 *
 *   h = 1 - cos(theta) = 2 sin^2(theta/2)
 *   d^2 = 2 h = a^2 + h^2
 *
 */
public strictfp class S2Cap implements S2Region {

  /**
   * Multiply a positive number by this constant to ensure that the result of a
   * floating point operation is at least as large as the true
   * infinite-precision result.
   */
  static final double ROUND_UP = 1.0 + 1.0 / (1L << 52);

  private final S2Point axis;
  private double height;

  // Caps may be constructed from either an axis and a height, or an axis and
  // an angle. To avoid ambiguity, there are no public constructors
  private S2Cap() {
    axis = new S2Point();
    height = 0;
  }

  private S2Cap(S2Point axis, double height) {
    this.axis = axis;
    this.height = height;
    // assert (isValid());
  }

  /**
   * Create a cap given its axis and the cap height, i.e. the maximum projected
   * distance along the cap axis from the cap center. 'axis' should be a
   * unit-length vector.
   */
  public static S2Cap fromAxisHeight(S2Point axis, double height) {
    // assert (S2.isUnitLength(axis));
    return new S2Cap(axis, height);
  }

  /**
   * Create a cap given its axis and the cap opening angle, i.e. maximum angle
   * between the axis and a point on the cap. 'axis' should be a unit-length
   * vector, and 'angle' should be between 0 and 180 degrees.
   */
  public static S2Cap fromAxisAngle(S2Point axis, S1Angle angle) {
    // The height of the cap can be computed as 1-cos(angle), but this isn't
    // very accurate for angles close to zero (where cos(angle) is almost 1).
    // Computing it as 2*(sin(angle/2)**2) gives much better precision.

    // assert (S2.isUnitLength(axis));
    double d = Math.sin(0.5 * angle.radians());
    return new S2Cap(axis, 2 * d * d);

  }

  /**
   * Create a cap given its axis and its area in steradians. 'axis' should be a
   * unit-length vector, and 'area' should be between 0 and 4 * M_PI.
   */
  public static S2Cap fromAxisArea(S2Point axis, double area) {
    // assert (S2.isUnitLength(axis));
    // Follows from area = 2*Pi*h for a unit sphere.
    return new S2Cap(axis, area / (2 * S2.M_PI));
  }

  /** Return an empty cap, i.e. a cap that contains no points. */
  public static S2Cap empty() {
    return new S2Cap(new S2Point(1, 0, 0), -1);
  }

  /** Return a full cap, i.e. a cap that contains all points. */
  public static S2Cap full() {
    return new S2Cap(new S2Point(1, 0, 0), 2);
  }


  // Accessor methods.
  public S2Point axis() {
    return axis;
  }

  public double height() {
    return height;
  }

  public double area() {
    return 2 * S2.M_PI * Math.max(0.0, height);
  }

  /**
   * Return the cap opening angle in radians, or a negative number for empty
   * caps.
   */
  public S1Angle angle() {
    // This could also be computed as acos(1 - height_), but the following
    // formula is much more accurate when the cap height is small. It
    // follows from the relationship h = 1 - cos(theta) = 2 sin^2(theta/2).
    if (isEmpty()) {
      return S1Angle.radians(-1);
    }
    return S1Angle.radians(2 * Math.asin(Math.sqrt(0.5 * height)));
  }

  /**
   * We allow negative heights (to represent empty caps) but not heights greater
   * than 2.
   */
  public boolean isValid() {
    return S2.isUnitLength(axis) && height <= 2;
  }

  /** Return true if the cap is empty, i.e. it contains no points. */
  public boolean isEmpty() {
    return height < 0;
  }

  /** Return true if the cap is full, i.e. it contains all points. */
  public boolean isFull() {
    return height >= 2;
  }

  /**
   * Return the complement of the interior of the cap. A cap and its complement
   * have the same boundary but do not share any interior points. The complement
   * operator is not a bijection, since the complement of a singleton cap
   * (containing a single point) is the same as the complement of an empty cap.
   */
  public S2Cap complement() {
    // The complement of a full cap is an empty cap, not a singleton.
    // Also make sure that the complement of an empty cap has height 2.
    double cHeight = isFull() ? -1 : 2 - Math.max(height, 0.0);
    return S2Cap.fromAxisHeight(S2Point.neg(axis), cHeight);
  }

  /**
   * Return true if and only if this cap contains the given other cap (in a set
   * containment sense, e.g. every cap contains the empty cap).
   */
  public boolean contains(S2Cap other) {
    if (isFull() || other.isEmpty()) {
      return true;
    }
    return angle().radians() >= axis.angle(other.axis)
        + other.angle().radians();
  }

  /**
   * Return true if and only if the interior of this cap intersects the given
   * other cap. (This relationship is not symmetric, since only the interior of
   * this cap is used.)
   */
  public boolean interiorIntersects(S2Cap other) {
    // Interior(X) intersects Y if and only if Complement(Interior(X))
    // does not contain Y.
    return !complement().contains(other);
  }

  /**
   * Return true if and only if the given point is contained in the interior of
   * the region (i.e. the region excluding its boundary). 'p' should be a
   * unit-length vector.
   */
  public boolean interiorContains(S2Point p) {
    // assert (S2.isUnitLength(p));
    return isFull() || S2Point.sub(axis, p).norm2() < 2 * height;
  }

  /**
   * Increase the cap height if necessary to include the given point. If the cap
   * is empty the axis is set to the given point, but otherwise it is left
   * unchanged. 'p' should be a unit-length vector.
   */
  public S2Cap addPoint(S2Point p) {
    // Compute the squared chord length, then convert it into a height.
    // assert (S2.isUnitLength(p));
    if (isEmpty()) {
      return new S2Cap(p, 0);
    } else {
      // To make sure that the resulting cap actually includes this point,
      // we need to round up the distance calculation. That is, after
      // calling cap.AddPoint(p), cap.Contains(p) should be true.
      double dist2 = S2Point.sub(axis, p).norm2();
      double newHeight = Math.max(height, ROUND_UP * 0.5 * dist2);
      return new S2Cap(axis, newHeight);
    }
  }

  // Increase the cap height if necessary to include "other". If the current
  // cap is empty it is set to the given other cap.
  public S2Cap addCap(S2Cap other) {
    if (isEmpty()) {
      return new S2Cap(other.axis, other.height);
    } else {
      // See comments for FromAxisAngle() and AddPoint(). This could be
      // optimized by doing the calculation in terms of cap heights rather
      // than cap opening angles.
      double angle = axis.angle(other.axis) + other.angle().radians();
      if (angle >= S2.M_PI) {
        return new S2Cap(axis, 2); //Full cap
      } else {
        double d = Math.sin(0.5 * angle);
        double newHeight = Math.max(height, ROUND_UP * 2 * d * d);
        return new S2Cap(axis, newHeight);
      }
    }
  }

  // //////////////////////////////////////////////////////////////////////
  // S2Region interface (see {@code S2Region} for details):
  @Override
  public S2Cap getCapBound() {
    return this;
  }

  @Override
  public S2LatLngRect getRectBound() {
    if (isEmpty()) {
      return S2LatLngRect.empty();
    }

    // Convert the axis to a (lat,lng) pair, and compute the cap angle.
    S2LatLng axisLatLng = new S2LatLng(axis);
    double capAngle = angle().radians();

    boolean allLongitudes = false;
    double[] lat = new double[2], lng = new double[2];
    lng[0] = -S2.M_PI;
    lng[1] = S2.M_PI;

    // Check whether cap includes the south pole.
    lat[0] = axisLatLng.lat().radians() - capAngle;
    if (lat[0] <= -S2.M_PI_2) {
      lat[0] = -S2.M_PI_2;
      allLongitudes = true;
    }
    // Check whether cap includes the north pole.
    lat[1] = axisLatLng.lat().radians() + capAngle;
    if (lat[1] >= S2.M_PI_2) {
      lat[1] = S2.M_PI_2;
      allLongitudes = true;
    }
    if (!allLongitudes) {
      // Compute the range of longitudes covered by the cap. We use the law
      // of sines for spherical triangles.
// Consider the triangle ABC where
      // A is the north pole, B is the center of the cap, and C is the point
      // of tangency between the cap boundary and a line of longitude. Then
      // C is a right angle, and letting a,b,c denote the sides opposite A,B,C,
      // we have sin(a)/sin(A) = sin(c)/sin(C), or sin(A) = sin(a)/sin(c).
      // Here "a" is the cap angle, and "c" is the colatitude (90 degrees
      // minus the latitude). This formula also works for negative latitudes.
      //
      // The formula for sin(a) follows from the relationship h = 1 - cos(a).

      double sinA = Math.sqrt(height * (2 - height));
      double sinC = Math.cos(axisLatLng.lat().radians());
      if (sinA <= sinC) {
        double angleA = Math.asin(sinA / sinC);
        lng[0] = Math.IEEEremainder(axisLatLng.lng().radians() - angleA,
            2 * S2.M_PI);
        lng[1] = Math.IEEEremainder(axisLatLng.lng().radians() + angleA,
            2 * S2.M_PI);
      }
    }
    return new S2LatLngRect(new R1Interval(lat[0], lat[1]), new S1Interval(
        lng[0], lng[1]));
  }

  @Override
  public boolean contains(S2Cell cell) {
    // If the cap does not contain all cell vertices, return false.
    // We check the vertices before taking the Complement() because we can't
    // accurately represent the complement of a very small cap (a height
    // of 2-epsilon is rounded off to 2).
    S2Point[] vertices = new S2Point[4];
    for (int k = 0; k < 4; ++k) {
      vertices[k] = cell.getVertex(k);
      if (!contains(vertices[k])) {
        return false;
      }
    }
    // Otherwise, return true if the complement of the cap does not intersect
    // the cell. (This test is slightly conservative, because technically we
    // want Complement().InteriorIntersects() here.)
    return !complement().intersects(cell, vertices);
  }

  @Override
  public boolean mayIntersect(S2Cell cell) {
    // If the cap contains any cell vertex, return true.
    S2Point[] vertices = new S2Point[4];
    for (int k = 0; k < 4; ++k) {
      vertices[k] = cell.getVertex(k);
      if (contains(vertices[k])) {
        return true;
      }
    }
    return intersects(cell, vertices);
  }

  /**
   * Return true if the cap intersects 'cell', given that the cap vertices have
   * already been checked.
   */
  public boolean intersects(S2Cell cell, S2Point[] vertices) {
    // Return true if this cap intersects any point of 'cell' excluding its
    // vertices (which are assumed to already have been checked).

    // If the cap is a hemisphere or larger, the cell and the complement of the
    // cap are both convex. Therefore since no vertex of the cell is contained,
    // no other interior point of the cell is contained either.
    if (height >= 1) {
      return false;
    }

    // We need to check for empty caps due to the axis check just below.
    if (isEmpty()) {
      return false;
    }

    // Optimization: return true if the cell contains the cap axis. (This
    // allows half of the edge checks below to be skipped.)
    if (cell.contains(axis)) {
      return true;
    }

    // At this point we know that the cell does not contain the cap axis,
    // and the cap does not contain any cell vertex. The only way that they
    // can intersect is if the cap intersects the interior of some edge.

    double sin2Angle = height * (2 - height); // sin^2(capAngle)
    for (int k = 0; k < 4; ++k) {
      S2Point edge = cell.getEdgeRaw(k);
      double dot = axis.dotProd(edge);
      if (dot > 0) {
        // The axis is in the interior half-space defined by the edge. We don't
        // need to consider these edges, since if the cap intersects this edge
        // then it also intersects the edge on the opposite side of the cell
        // (because we know the axis is not contained with the cell).
        continue;
      }
      // The Norm2() factor is necessary because "edge" is not normalized.
      if (dot * dot > sin2Angle * edge.norm2()) {
        return false; // Entire cap is on the exterior side of this edge.
      }
      // Otherwise, the great circle containing this edge intersects
      // the interior of the cap. We just need to check whether the point
      // of closest approach occurs between the two edge endpoints.
      S2Point dir = S2Point.crossProd(edge, axis);
      if (dir.dotProd(vertices[k]) < 0
          && dir.dotProd(vertices[(k + 1) & 3]) > 0) {
        return true;
      }
    }
    return false;
  }

  public boolean contains(S2Point p) {
    // The point 'p' should be a unit-length vector.
    // assert (S2.isUnitLength(p));
    return S2Point.sub(axis, p).norm2() <= 2 * height;

  }


  /** Return true if two caps are identical. */
  @Override
  public boolean equals(Object that) {

    if (!(that instanceof S2Cap)) {
      return false;
    }

    S2Cap other = (S2Cap) that;
    return (axis.equals(other.axis) && height == other.height)
        || (isEmpty() && other.isEmpty()) || (isFull() && other.isFull());

  }

  @Override
  public int hashCode() {
    if (isFull()) {
      return 17;
    } else if (isEmpty()) {
      return 37;
    }
    int result = 17;
    result = 37 * result + axis.hashCode();
    long heightBits = Double.doubleToLongBits(height);
    result = 37 * result + (int) ((heightBits >>> 32) ^ heightBits);
    return result;
  }

  // /////////////////////////////////////////////////////////////////////
  // The following static methods are convenience functions for assertions
  // and testing purposes only.

  /**
   * Return true if the cap axis and height differ by at most "max_error" from
   * the given cap "other".
   */
  boolean approxEquals(S2Cap other, double maxError) {
    return (axis.aequal(other.axis, maxError) && Math.abs(height - other.height) <= maxError)
        || (isEmpty() && other.height <= maxError)
        || (other.isEmpty() && height <= maxError)
        || (isFull() && other.height >= 2 - maxError)
        || (other.isFull() && height >= 2 - maxError);
  }

  boolean approxEquals(S2Cap other) {
    return approxEquals(other, 1e-14);
  }

  @Override
  public String toString() {
    return "[Point = " + axis.toString() + " Height = " + height + "]";
  }
}
diff --git a/src/com/google/common/geometry/S2Cell.java b/src/com/google/common/geometry/S2Cell.java
new file mode 100644
index 0000000..35f70ea
--- /dev/null
+++ b/src/com/google/common/geometry/S2Cell.java
@@ -0,0 +1,442 @@
/*
 * Copyright 2005 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.common.geometry;


/**
 * An S2Cell is an S2Region object that represents a cell. Unlike S2CellIds, it
 * supports efficient containment and intersection tests. However, it is also a
 * more expensive representation.
 *
 */

public strictfp class S2Cell implements S2Region {

  private static final int MAX_CELL_SIZE = 1 << S2CellId.MAX_LEVEL;

  // This structure occupies 44 bytes plus one pointer for the vtable.
  byte face;
  byte level;
  byte orientation;
  S2CellId cellId;
  double[][] uv = new double[2][2];

  /**
   * Default constructor used only internally.
+ */ + S2Cell() { + } + + /** + * An S2Cell always corresponds to a particular S2CellId. The other + * constructors are just convenience methods. + */ + public S2Cell(S2CellId id) { + init(id); + } + + // This is a static method in order to provide named parameters. + public static S2Cell fromFacePosLevel(int face, byte pos, int level) { + return new S2Cell(S2CellId.fromFacePosLevel(face, pos, level)); + } + + // Convenience methods. + public S2Cell(S2Point p) { + init(S2CellId.fromPoint(p)); + } + + public S2Cell(S2LatLng ll) { + init(S2CellId.fromLatLng(ll)); + } + + + public S2CellId id() { + return cellId; + } + + public int face() { + return face; + } + + public byte level() { + return level; + } + + public byte orientation() { + return orientation; + } + + public boolean isLeaf() { + return level == S2CellId.MAX_LEVEL; + } + + public S2Point getVertex(int k) { + return S2Point.normalize(getVertexRaw(k)); + } + + /** + * Return the k-th vertex of the cell (k = 0,1,2,3). Vertices are returned in + * CCW order. The points returned by GetVertexRaw are not necessarily unit + * length. + */ + public S2Point getVertexRaw(int k) { + // Vertices are returned in the order SW, SE, NE, NW. + return S2Projections.faceUvToXyz(face, uv[0][(k >> 1) ^ (k & 1)], uv[1][k >> 1]); + } + + public S2Point getEdge(int k) { + return S2Point.normalize(getEdgeRaw(k)); + } + + public S2Point getEdgeRaw(int k) { + switch (k) { + case 0: + return S2Projections.getVNorm(face, uv[1][0]); // South + case 1: + return S2Projections.getUNorm(face, uv[0][1]); // East + case 2: + return S2Point.neg(S2Projections.getVNorm(face, uv[1][1])); // North + default: + return S2Point.neg(S2Projections.getUNorm(face, uv[0][0])); // West + } + } + + /** + * Return the inward-facing normal of the great circle passing through the + * edge from vertex k to vertex k+1 (mod 4). The normals returned by + * GetEdgeRaw are not necessarily unit length. 
+ * + * If this is not a leaf cell, set children[0..3] to the four children of + * this cell (in traversal order) and return true. Otherwise returns false. + * This method is equivalent to the following: + * + * for (pos=0, id=child_begin(); id != child_end(); id = id.next(), ++pos) + * children[i] = S2Cell(id); + * + * except that it is more than two times faster. + */ + public boolean subdivide(S2Cell children[]) { + // This function is equivalent to just iterating over the child cell ids + // and calling the S2Cell constructor, but it is about 2.5 times faster. + + if (cellId.isLeaf()) { + return false; + } + + // Compute the cell midpoint in uv-space. + R2Vector uvMid = getCenterUV(); + + // Create four children with the appropriate bounds. + S2CellId id = cellId.childBegin(); + for (int pos = 0; pos < 4; ++pos, id = id.next()) { + S2Cell child = children[pos]; + child.face = face; + child.level = (byte) (level + 1); + child.orientation = (byte) (orientation ^ S2.POS_TO_ORIENTATION[pos]); + child.cellId = id; + int ij = S2.POS_TO_IJ[orientation][pos]; + for (int d = 0; d < 2; ++d) { + // The dimension 0 index (i/u) is in bit 1 of ij. + int m = 1 - ((ij >> (1 - d)) & 1); + child.uv[d][m] = uvMid.get(d); + child.uv[d][1 - m] = uv[d][1 - m]; + } + } + return true; + } + + /** + * Return the direction vector corresponding to the center in (s,t)-space of + * the given cell. This is the point at which the cell is divided into four + * subcells; it is not necessarily the centroid of the cell in (u,v)-space or + * (x,y,z)-space. The point returned by GetCenterRaw is not necessarily unit + * length. + */ + public S2Point getCenter() { + return S2Point.normalize(getCenterRaw()); + } + + public S2Point getCenterRaw() { + return cellId.toPointRaw(); + } + + /** + * Return the center of the cell in (u,v) coordinates (see {@code + * S2Projections}). 
Note that the center of the cell is defined as the point
   * at which it is recursively subdivided into four children; in general, it is
   * not at the midpoint of the (u,v) rectangle covered by the cell
   */
  public R2Vector getCenterUV() {
    MutableInteger i = new MutableInteger(0);
    MutableInteger j = new MutableInteger(0);
    R2Vector centerUv = new R2Vector();
    // Recover the leaf (i,j) coordinates of this cell's id on its face.
    cellId.toFaceIJOrientation(i, j, null);
    // Width of this cell in leaf-cell units (1 << 0 for a leaf cell).
    int cellSize = 1 << (S2CellId.MAX_LEVEL - level);

    // "sij" is the center in scaled (i,j) coordinates: leaf coordinates are
    // doubled so that cell centers are exactly representable as integers.
    // (i & -cellSize) rounds i down to this cell's lower-left corner.
    int sij = (i.intValue() & -cellSize) * 2 + cellSize - MAX_CELL_SIZE;
    centerUv.x = S2Projections.stToUV((1.0 / MAX_CELL_SIZE) * sij);

    sij = (j.intValue() & -cellSize) * 2 + cellSize - MAX_CELL_SIZE;
    centerUv.y = S2Projections.stToUV((1.0 / MAX_CELL_SIZE) * sij);

    return centerUv;
  }

  /**
   * Return the average area for cells at the given level.
   */
  public static double averageArea(int level) {
    return S2Projections.AVG_AREA.getValue(level);
  }

  /**
   * Return the average area of cells at this level. This is accurate to within
   * a factor of 1.7 (for S2_QUADRATIC_PROJECTION) and is extremely cheap to
   * compute.
   */
  public double averageArea() {
    return averageArea(level);
  }

  /**
   * Return the approximate area of this cell. This method is accurate to within
   * 3% for all cell sizes and accurate to within 0.1% for cells at
   * level 5 or higher (i.e. 300km square or smaller). It is moderately cheap to
   * compute.
   */
  public double approxArea() {

    // All cells at the first two levels have the same area.
    if (level < 2) {
      return averageArea(level);
    }

    // First, compute the approximate area of the cell when projected
    // perpendicular to its normal. The cross product of its diagonals gives
    // the normal, and the length of the normal is twice the projected area.
+ double flatArea = 0.5 * S2Point.crossProd( + S2Point.sub(getVertex(2), getVertex(0)), S2Point.sub(getVertex(3), getVertex(1))).norm(); + + // Now, compensate for the curvature of the cell surface by pretending + // that the cell is shaped like a spherical cap. The ratio of the + // area of a spherical cap to the area of its projected disc turns out + // to be 2 / (1 + sqrt(1 - r*r)) where "r" is the radius of the disc. + // For example, when r=0 the ratio is 1, and when r=1 the ratio is 2. + // Here we set Pi*r*r == flat_area to find the equivalent disc. + return flatArea * 2 / (1 + Math.sqrt(1 - Math.min(S2.M_1_PI * flatArea, 1.0))); + } + + /** + * Return the area of this cell as accurately as possible. This method is more + * expensive but it is accurate to 6 digits of precision even for leaf cells + * (whose area is approximately 1e-18). + */ + public double exactArea() { + S2Point v0 = getVertex(0); + S2Point v1 = getVertex(1); + S2Point v2 = getVertex(2); + S2Point v3 = getVertex(3); + return S2.area(v0, v1, v2) + S2.area(v0, v2, v3); + } + + // ////////////////////////////////////////////////////////////////////// + // S2Region interface (see {@code S2Region} for details): + + @Override + public S2Region clone() { + S2Cell clone = new S2Cell(); + clone.face = this.face; + clone.level = this.level; + clone.orientation = this.orientation; + clone.uv = this.uv.clone(); + + return clone; + } + + @Override + public S2Cap getCapBound() { + // Use the cell center in (u,v)-space as the cap axis. This vector is + // very close to GetCenter() and faster to compute. Neither one of these + // vectors yields the bounding cap with minimal surface area, but they + // are both pretty close. + // + // It's possible to show that the two vertices that are furthest from + // the (u,v)-origin never determine the maximum cap size (this is a + // possible future optimization). 
+ + double u = 0.5 * (uv[0][0] + uv[0][1]); + double v = 0.5 * (uv[1][0] + uv[1][1]); + S2Cap cap = S2Cap.fromAxisHeight(S2Point.normalize(S2Projections.faceUvToXyz(face, u, v)), 0); + for (int k = 0; k < 4; ++k) { + cap = cap.addPoint(getVertex(k)); + } + return cap; + } + + // We grow the bounds slightly to make sure that the bounding rectangle + // also contains the normalized versions of the vertices. Note that the + // maximum result magnitude is Pi, with a floating-point exponent of 1. + // Therefore adding or subtracting 2**-51 will always change the result. + private static final double MAX_ERROR = 1.0 / (1L << 51); + + // The 4 cells around the equator extend to +/-45 degrees latitude at the + // midpoints of their top and bottom edges. The two cells covering the + // poles extend down to +/-35.26 degrees at their vertices. + // adding kMaxError (as opposed to the C version) because of asin and atan2 + // roundoff errors + private static final double POLE_MIN_LAT = Math.asin(Math.sqrt(1.0 / 3.0)) - MAX_ERROR; + // 35.26 degrees + + + @Override + public S2LatLngRect getRectBound() { + if (level > 0) { + // Except for cells at level 0, the latitude and longitude extremes are + // attained at the vertices. Furthermore, the latitude range is + // determined by one pair of diagonally opposite vertices and the + // longitude range is determined by the other pair. + // + // We first determine which corner (i,j) of the cell has the largest + // absolute latitude. To maximize latitude, we want to find the point in + // the cell that has the largest absolute z-coordinate and the smallest + // absolute x- and y-coordinates. To do this we look at each coordinate + // (u and v), and determine whether we want to minimize or maximize that + // coordinate based on the axis direction and the cell's (u,v) quadrant. + double u = uv[0][0] + uv[0][1]; + double v = uv[1][0] + uv[1][1]; + int i = S2Projections.getUAxis(face).z == 0 ? (u < 0 ? 1 : 0) : (u > 0 ? 
1 : 0); + int j = S2Projections.getVAxis(face).z == 0 ? (v < 0 ? 1 : 0) : (v > 0 ? 1 : 0); + + + R1Interval lat = R1Interval.fromPointPair(getLatitude(i, j), getLatitude(1 - i, 1 - j)); + lat = lat.expanded(MAX_ERROR).intersection(S2LatLngRect.fullLat()); + if (lat.lo() == -S2.M_PI_2 || lat.hi() == S2.M_PI_2) { + return new S2LatLngRect(lat, S1Interval.full()); + } + S1Interval lng = S1Interval.fromPointPair(getLongitude(i, 1 - j), getLongitude(1 - i, j)); + return new S2LatLngRect(lat, lng.expanded(MAX_ERROR)); + } + + + // The face centers are the +X, +Y, +Z, -X, -Y, -Z axes in that order. + // assert (S2Projections.getNorm(face).get(face % 3) == ((face < 3) ? 1 : -1)); + switch (face) { + case 0: + return new S2LatLngRect( + new R1Interval(-S2.M_PI_4, S2.M_PI_4), new S1Interval(-S2.M_PI_4, S2.M_PI_4)); + case 1: + return new S2LatLngRect( + new R1Interval(-S2.M_PI_4, S2.M_PI_4), new S1Interval(S2.M_PI_4, 3 * S2.M_PI_4)); + case 2: + return new S2LatLngRect( + new R1Interval(POLE_MIN_LAT, S2.M_PI_2), new S1Interval(-S2.M_PI, S2.M_PI)); + case 3: + return new S2LatLngRect( + new R1Interval(-S2.M_PI_4, S2.M_PI_4), new S1Interval(3 * S2.M_PI_4, -3 * S2.M_PI_4)); + case 4: + return new S2LatLngRect( + new R1Interval(-S2.M_PI_4, S2.M_PI_4), new S1Interval(-3 * S2.M_PI_4, -S2.M_PI_4)); + default: + return new S2LatLngRect( + new R1Interval(-S2.M_PI_2, -POLE_MIN_LAT), new S1Interval(-S2.M_PI, S2.M_PI)); + } + + } + + @Override + public boolean mayIntersect(S2Cell cell) { + return cellId.intersects(cell.cellId); + } + + public boolean contains(S2Point p) { + // We can't just call XYZtoFaceUV, because for points that lie on the + // boundary between two faces (i.e. u or v is +1/-1) we need to return + // true for both adjacent cells. 
+ R2Vector uvPoint = new R2Vector(); + if (!S2Projections.faceXyzToUv(face, p, uvPoint)) { + return false; + } + return (uvPoint.x >= uv[0][0] && uvPoint.x <= uv[0][1] && uvPoint.y >= uv[1][0] + && uvPoint.y <= uv[1][1]); + } + + // The point 'p' does not need to be normalized. + @Override + public boolean contains(S2Cell cell) { + return cellId.contains(cell.cellId); + } + + private void init(S2CellId id) { + cellId = id; + MutableInteger ij[] = new MutableInteger[2]; + MutableInteger mOrientation = new MutableInteger(0); + + for (int d = 0; d < 2; ++d) { + ij[d] = new MutableInteger(0); + } + + face = (byte) id.toFaceIJOrientation(ij[0], ij[1], mOrientation); + orientation = (byte) mOrientation.intValue(); // Compress int to a byte. + level = (byte) id.level(); + int cellSize = 1 << (S2CellId.MAX_LEVEL - level); + for (int d = 0; d < 2; ++d) { + // Compute the cell bounds in scaled (i,j) coordinates. + int sijLo = (ij[d].intValue() & -cellSize) * 2 - MAX_CELL_SIZE; + int sijHi = sijLo + cellSize * 2; + uv[d][0] = S2Projections.stToUV((1.0 / MAX_CELL_SIZE) * sijLo); + uv[d][1] = S2Projections.stToUV((1.0 / MAX_CELL_SIZE) * sijHi); + } + } + + + // Internal method that does the actual work in the constructors. + + private double getLatitude(int i, int j) { + S2Point p = S2Projections.faceUvToXyz(face, uv[0][i], uv[1][j]); + return Math.atan2(p.z, Math.sqrt(p.x * p.x + p.y * p.y)); + } + + private double getLongitude(int i, int j) { + S2Point p = S2Projections.faceUvToXyz(face, uv[0][i], uv[1][j]); + return Math.atan2(p.y, p.x); + } + + // Return the latitude or longitude of the cell vertex given by (i,j), + // where "i" and "j" are either 0 or 1. 
+ + @Override + public String toString() { + return "[" + face + ", " + level + ", " + orientation + ", " + cellId + "]"; + } + + @Override + public int hashCode() { + int value = 17; + value = 37 * (37 * (37 * value + face) + orientation) + level; + return 37 * value + id().hashCode(); + } + + @Override + public boolean equals(Object that) { + if (that instanceof S2Cell) { + S2Cell thatCell = (S2Cell) that; + return this.face == thatCell.face && this.level == thatCell.level + && this.orientation == thatCell.orientation && this.cellId.equals(thatCell.cellId); + } + return false; + } + +} diff --git a/src/com/google/common/geometry/S2CellId.java b/src/com/google/common/geometry/S2CellId.java new file mode 100644 index 0000000..4e5f1a3 --- /dev/null +++ b/src/com/google/common/geometry/S2CellId.java @@ -0,0 +1,964 @@ +/* + * Copyright 2005 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.common.geometry; + +import java.util.List; +import java.util.Locale; + +/** + * An S2CellId is a 64-bit unsigned integer that uniquely identifies a cell in + * the S2 cell decomposition. It has the following format: + * + * <pre> + * id = [face][face_pos] + * </pre> + * + * face: a 3-bit number (range 0..5) encoding the cube face. + * + * face_pos: a 61-bit number encoding the position of the center of this cell + * along the Hilbert curve over this face (see the Wiki pages for details). 
+ * + * Sequentially increasing cell ids follow a continuous space-filling curve over + * the entire sphere. They have the following properties: + * - The id of a cell at level k consists of a 3-bit face number followed by k + * bit pairs that recursively select one of the four children of each cell. The + * next bit is always 1, and all other bits are 0. Therefore, the level of a + * cell is determined by the position of its lowest-numbered bit that is turned + * on (for a cell at level k, this position is 2 * (MAX_LEVEL - k).) + * - The id of a parent cell is at the midpoint of the range of ids spanned by + * its children (or by its descendants at any level). + * + * Leaf cells are often used to represent points on the unit sphere, and this + * class provides methods for converting directly between these two + * representations. For cells that represent 2D regions rather than discrete + * point, it is better to use the S2Cell class. + * + * + */ +public strictfp class S2CellId implements Comparable<S2CellId> { + + // Although only 60 bits are needed to represent the index of a leaf + // cell, we need an extra bit in order to represent the position of + // the center of the leaf cell along the Hilbert curve. + public static final int FACE_BITS = 3; + public static final int NUM_FACES = 6; + public static final int MAX_LEVEL = 30; // Valid levels: 0..MAX_LEVEL + public static final int POS_BITS = 2 * MAX_LEVEL + 1; + public static final int MAX_SIZE = 1 << MAX_LEVEL; + + // Constant related to unsigned long's + public static final long MAX_UNSIGNED = -1L; // Equivalent to 0xffffffffffffffffL + + // The following lookup tables are used to convert efficiently between an + // (i,j) cell index and the corresponding position along the Hilbert curve. 
+ // "lookup_pos" maps 4 bits of "i", 4 bits of "j", and 2 bits representing the + // orientation of the current cell into 8 bits representing the order in which + // that subcell is visited by the Hilbert curve, plus 2 bits indicating the + // new orientation of the Hilbert curve within that subcell. (Cell + // orientations are represented as combination of kSwapMask and kInvertMask.) + // + // "lookup_ij" is an inverted table used for mapping in the opposite + // direction. + // + // We also experimented with looking up 16 bits at a time (14 bits of position + // plus 2 of orientation) but found that smaller lookup tables gave better + // performance. (2KB fits easily in the primary cache.) + + + // Values for these constants are *declared* in the *.h file. Even though + // the declaration specifies a value for the constant, that declaration + // is not a *definition* of storage for the value. Because the values are + // supplied in the declaration, we don't need the values here. Failing to + // define storage causes link errors for any code that tries to take the + // address of one of these values. + private static final int LOOKUP_BITS = 4; + private static final int SWAP_MASK = 0x01; + private static final int INVERT_MASK = 0x02; + + private static final int[] LOOKUP_POS = new int[1 << (2 * LOOKUP_BITS + 2)]; + private static final int[] LOOKUP_IJ = new int[1 << (2 * LOOKUP_BITS + 2)]; + + /** + * This is the offset required to wrap around from the beginning of the + * Hilbert curve to the end or vice versa; see next_wrap() and prev_wrap(). + */ + private static final long WRAP_OFFSET = (long) (NUM_FACES) << POS_BITS; + + static { + initLookupCell(0, 0, 0, 0, 0, 0); + initLookupCell(0, 0, 0, SWAP_MASK, 0, SWAP_MASK); + initLookupCell(0, 0, 0, INVERT_MASK, 0, INVERT_MASK); + initLookupCell(0, 0, 0, SWAP_MASK | INVERT_MASK, 0, SWAP_MASK | INVERT_MASK); + } + + /** + * The id of the cell. 
+ */ + private final long id; + + public S2CellId(long id) { + this.id = id; + } + + public S2CellId() { + this.id = 0; + } + + /** The default constructor returns an invalid cell id. */ + public static S2CellId none() { + return new S2CellId(); + } + + /** + * Returns an invalid cell id guaranteed to be larger than any valid cell id. + * Useful for creating indexes. + */ + public static S2CellId sentinel() { + return new S2CellId(MAX_UNSIGNED); // -1 + } + + /** + * Return a cell given its face (range 0..5), 61-bit Hilbert curve position + * within that face, and level (range 0..MAX_LEVEL). The given position will + * be modified to correspond to the Hilbert curve position at the center of + * the returned cell. This is a static function rather than a constructor in + * order to give names to the arguments. + */ + public static S2CellId fromFacePosLevel(int face, long pos, int level) { + return new S2CellId((((long) face) << POS_BITS) + (pos | 1)).parent(level); + } + + /** + * Return the leaf cell containing the given point (a direction vector, not + * necessarily unit length). + */ + public static S2CellId fromPoint(S2Point p) { + R2Vector uv = new R2Vector(); + int face = S2Projections.xyzToFaceUV(p, uv); + int i = stToIJ(S2Projections.uvToST(uv.x)); + int j = stToIJ(S2Projections.uvToST(uv.y)); + return fromFaceIJ(face, i, j); + } + + + /** Return the leaf cell containing the given S2LatLng. */ + public static S2CellId fromLatLng(S2LatLng ll) { + return fromPoint(ll.toPoint()); + } + + public S2Point toPoint() { + return S2Point.normalize(toPointRaw()); + } + + /** + * Return the direction vector corresponding to the center of the given cell. + * The vector returned by ToPointRaw is not necessarily unit length. + */ + public S2Point toPointRaw() { + // First we compute the discrete (i,j) coordinates of a leaf cell contained + // within the given cell. 
Given that cells are represented by the Hilbert + // curve position corresponding at their center, it turns out that the cell + // returned by ToFaceIJOrientation is always one of two leaf cells closest + // to the center of the cell (unless the given cell is a leaf cell itself, + // in which case there is only one possibility). + // + // Given a cell of size s >= 2 (i.e. not a leaf cell), and letting (imin, + // jmin) be the coordinates of its lower left-hand corner, the leaf cell + // returned by ToFaceIJOrientation() is either (imin + s/2, jmin + s/2) + // (imin + s/2 - 1, jmin + s/2 - 1). We can distinguish these two cases by + // looking at the low bit of "i" or "j". In the first case the low bit is + // zero, unless s == 2 (i.e. the level just above leaf cells) in which case + // the low bit is one. + // + // The following calculation converts (i,j) to the (si,ti) coordinates of + // the cell center. (We need to multiply the coordinates by a factor of 2 + // so that the center of leaf cells can be represented exactly.) + + MutableInteger i = new MutableInteger(0); + MutableInteger j = new MutableInteger(0); + int face = toFaceIJOrientation(i, j, null); + // System.out.println("i= " + i.intValue() + " j = " + j.intValue()); + int delta = isLeaf() ? 1 : (((i.intValue() ^ (((int) id) >>> 2)) & 1) != 0) + ? 2 : 0; + int si = (i.intValue() << 1) + delta - MAX_SIZE; + int ti = (j.intValue() << 1) + delta - MAX_SIZE; + return faceSiTiToXYZ(face, si, ti); + } + + /** Return the S2LatLng corresponding to the center of the given cell. */ + public S2LatLng toLatLng() { + return new S2LatLng(toPointRaw()); + } + + + /** The 64-bit unique identifier for this cell. */ + public long id() { + return id; + } + + /** Return true if id() represents a valid cell. */ + public boolean isValid() { + return face() < NUM_FACES && ((lowestOnBit() & (0x1555555555555555L)) != 0); + } + + /** Which cube face this cell belongs to, in the range 0..5. 
 */
  public int face() {
    // The face occupies the top FACE_BITS (3) bits of the 64-bit id.
    return (int) (id >>> POS_BITS);
  }

  /**
   * The position of the cell center along the Hilbert curve over this face, in
   * the range 0..(2**kPosBits-1).
   */
  public long pos() {
    // Mask off the face bits, keeping the low POS_BITS (61) position bits.
    return (id & (-1L >>> FACE_BITS));
  }

  /** Return the subdivision level of the cell (range 0..MAX_LEVEL). */
  public int level() {
    // Fast path for leaf cells.
    if (isLeaf()) {
      return MAX_LEVEL;
    }
    // The level is determined by the position of the lowest set bit:
    // for a cell at level k that bit is at position 2 * (MAX_LEVEL - k).
    // Below is a branchy binary search over that bit position, done 32 bits
    // at a time.
    int x = ((int) id);
    int level = -1;
    if (x != 0) {
      level += 16;
    } else {
      x = (int) (id >>> 32);
    }
    // We only need to look at even-numbered bits to determine the
    // level of a valid cell id.
    x &= -x; // Get lowest bit.
    if ((x & 0x00005555) != 0) {
      level += 8;
    }
    if ((x & 0x00550055) != 0) {
      level += 4;
    }
    if ((x & 0x05050505) != 0) {
      level += 2;
    }
    if ((x & 0x11111111) != 0) {
      level += 1;
    }
    // assert (level >= 0 && level <= MAX_LEVEL);
    return level;
  }



  /**
   * Return true if this is a leaf cell (more efficient than checking whether
   * level() == MAX_LEVEL).
   */
  public boolean isLeaf() {
    // Leaf cells are exactly the ids whose lowest bit is set.
    return ((int) id & 1) != 0;
  }

  /**
   * Return true if this is a top-level face cell (more efficient than checking
   * whether level() == 0).
   */
  public boolean isFace() {
    // A face cell has no bits set below the level-0 "lsb" marker bit.
    return (id & (lowestOnBitForLevel(0) - 1)) == 0;
  }

  /**
   * Return the child position (0..3) of this cell's ancestor at the given
   * level, relative to its parent. The argument should be in the range
   * 1..MAX_LEVEL. For example, child_position(1) returns the position of this
   * cell's level-1 ancestor within its top-level face cell.
   */
  public int childPosition(int level) {
    // Each level's child position occupies two bits; +1 skips the marker bit.
    return (int) (id >>> (2 * (MAX_LEVEL - level) + 1)) & 3;
  }

  // Methods that return the range of cell ids that are contained
  // within this cell (including itself). The range is *inclusive*
  // (i.e. test using >= and <=) and the return values of both
  // methods are valid leaf cell ids.
  //
  // These methods should not be used for iteration. If you want to
  // iterate through all the leaf cells, call child_begin(MAX_LEVEL) and
  // child_end(MAX_LEVEL) instead.
  //
  // It would in fact be error-prone to define a range_end() method,
  // because (range_max().id() + 1) is not always a valid cell id, and the
  // iterator would need to be tested using "<" rather than the usual "!=".

  /** Return the minimum leaf cell id contained within this cell. */
  public S2CellId rangeMin() {
    // Clearing everything below the marker bit yields the first leaf id.
    return new S2CellId(id - (lowestOnBit() - 1));
  }

  /** Return the maximum leaf cell id contained within this cell. */
  public S2CellId rangeMax() {
    return new S2CellId(id + (lowestOnBit() - 1));
  }


  /** Return true if the given cell is contained within this one. */
  public boolean contains(S2CellId other) {
    // assert (isValid() && other.isValid());
    // Containment is an (unsigned) interval test against this cell's leaf range.
    return other.greaterOrEquals(rangeMin()) && other.lessOrEquals(rangeMax());
  }

  /** Return true if the given cell intersects this one. */
  public boolean intersects(S2CellId other) {
    // assert (isValid() && other.isValid());
    // Two cells intersect iff their leaf-cell ranges overlap.
    return other.rangeMin().lessOrEquals(rangeMax())
        && other.rangeMax().greaterOrEquals(rangeMin());
  }

  /** Return the immediate parent of this cell (requires level() > 0). */
  public S2CellId parent() {
    // assert (isValid() && level() > 0);
    // Move the marker bit up one level and clear the old child bits below it.
    long newLsb = lowestOnBit() << 2;
    return new S2CellId((id & -newLsb) | newLsb);
  }

  /**
   * Return the cell at the previous level or at the given level (which must be
   * less than or equal to the current level).
+ */ + public S2CellId parent(int level) { + // assert (isValid() && level >= 0 && level <= this.level()); + long newLsb = lowestOnBitForLevel(level); + return new S2CellId((id & -newLsb) | newLsb); + } + + public S2CellId childBegin() { + // assert (isValid() && level() < MAX_LEVEL); + long oldLsb = lowestOnBit(); + return new S2CellId(id - oldLsb + (oldLsb >>> 2)); + } + + public S2CellId childBegin(int level) { + // assert (isValid() && level >= this.level() && level <= MAX_LEVEL); + return new S2CellId(id - lowestOnBit() + lowestOnBitForLevel(level)); + } + + public S2CellId childEnd() { + // assert (isValid() && level() < MAX_LEVEL); + long oldLsb = lowestOnBit(); + return new S2CellId(id + oldLsb + (oldLsb >>> 2)); + } + + public S2CellId childEnd(int level) { + // assert (isValid() && level >= this.level() && level <= MAX_LEVEL); + return new S2CellId(id + lowestOnBit() + lowestOnBitForLevel(level)); + } + + // Iterator-style methods for traversing the immediate children of a cell or + // all of the children at a given level (greater than or equal to the current + // level). Note that the end value is exclusive, just like standard STL + // iterators, and may not even be a valid cell id. You should iterate using + // code like this: + // + // for(S2CellId c = id.childBegin(); !c.equals(id.childEnd()); c = c.next()) + // ... + // + // The convention for advancing the iterator is "c = c.next()", so be sure + // to use 'equals()' in the loop guard, or compare 64-bit cell id's, + // rather than "c != id.childEnd()". + + /** + * Return the next cell at the same level along the Hilbert curve. Works + * correctly when advancing from one face to the next, but does *not* wrap + * around from the last face to the first or vice versa. + */ + public S2CellId next() { + return new S2CellId(id + (lowestOnBit() << 1)); + } + + /** + * Return the previous cell at the same level along the Hilbert curve. 
Works + * correctly when advancing from one face to the next, but does *not* wrap + * around from the last face to the first or vice versa. + */ + public S2CellId prev() { + return new S2CellId(id - (lowestOnBit() << 1)); + } + + + /** + * Like next(), but wraps around from the last face to the first and vice + * versa. Should *not* be used for iteration in conjunction with + * child_begin(), child_end(), Begin(), or End(). + */ + public S2CellId nextWrap() { + S2CellId n = next(); + if (unsignedLongLessThan(n.id, WRAP_OFFSET)) { + return n; + } + return new S2CellId(n.id - WRAP_OFFSET); + } + + /** + * Like prev(), but wraps around from the last face to the first and vice + * versa. Should *not* be used for iteration in conjunction with + * child_begin(), child_end(), Begin(), or End(). + */ + public S2CellId prevWrap() { + S2CellId p = prev(); + if (p.id < WRAP_OFFSET) { + return p; + } + return new S2CellId(p.id + WRAP_OFFSET); + } + + + public static S2CellId begin(int level) { + return fromFacePosLevel(0, 0, 0).childBegin(level); + } + + public static S2CellId end(int level) { + return fromFacePosLevel(5, 0, 0).childEnd(level); + } + + + /** + * Decodes the cell id from a compact text string suitable for display or + * indexing. Cells at lower levels (i.e. larger cells) are encoded into + * fewer characters. The maximum token length is 16. 
+ * + * @param token the token to decode + * @return the S2CellId for that token + * @throws NumberFormatException if the token is not formatted correctly + */ + public static S2CellId fromToken(String token) { + if (token == null) { + throw new NumberFormatException("Null string in S2CellId.fromToken"); + } + if (token.length() == 0) { + throw new NumberFormatException("Empty string in S2CellId.fromToken"); + } + if (token.length() > 16 || "X".equals(token)) { + return none(); + } + + long value = 0; + for (int pos = 0; pos < 16; pos++) { + int digit = 0; + if (pos < token.length()) { + digit = Character.digit(token.charAt(pos), 16); + if (digit == -1) { + throw new NumberFormatException(token); + } + if (overflowInParse(value, digit)) { + throw new NumberFormatException("Too large for unsigned long: " + token); + } + } + value = (value * 16) + digit; + } + + return new S2CellId(value); + } + + /** + * Encodes the cell id to compact text strings suitable for display or indexing. + * Cells at lower levels (i.e. larger cells) are encoded into fewer characters. + * The maximum token length is 16. + * + * Simple implementation: convert the id to hex and strip trailing zeros. We + * could use base-32 or base-64, but assuming the cells used for indexing + * regions are at least 100 meters across (level 16 or less), the savings + * would be at most 3 bytes (9 bytes hex vs. 6 bytes base-64). + * + * @return the encoded cell id + */ + public String toToken() { + if (id == 0) { + return "X"; + } + + String hex = Long.toHexString(id).toLowerCase(Locale.ENGLISH); + StringBuilder sb = new StringBuilder(16); + for (int i = hex.length(); i < 16; i++) { + sb.append('0'); + } + sb.append(hex); + for (int len = 16; len > 0; len--) { + if (sb.charAt(len - 1) != '0') { + return sb.substring(0, len); + } + } + + throw new RuntimeException("Shouldn't make it here"); + } + + /** + * Returns true if (current * 10) + digit is a number too large to be + * represented by an unsigned long. 
This is useful for detecting overflow + * while parsing a string representation of a number. + */ + private static boolean overflowInParse(long current, int digit) { + return overflowInParse(current, digit, 10); + } + + /** + * Returns true if (current * radix) + digit is a number too large to be + * represented by an unsigned long. This is useful for detecting overflow + * while parsing a string representation of a number. + * Does not verify whether supplied radix is valid, passing an invalid radix + * will give undefined results or an ArrayIndexOutOfBoundsException. + */ + private static boolean overflowInParse(long current, int digit, int radix) { + if (current >= 0) { + if (current < maxValueDivs[radix]) { + return false; + } + if (current > maxValueDivs[radix]) { + return true; + } + // current == maxValueDivs[radix] + return (digit > maxValueMods[radix]); + } + + // current < 0: high bit is set + return true; + } + + // calculated as 0xffffffffffffffff / radix + private static final long maxValueDivs[] = {0, 0, // 0 and 1 are invalid + 9223372036854775807L, 6148914691236517205L, 4611686018427387903L, // 2-4 + 3689348814741910323L, 3074457345618258602L, 2635249153387078802L, // 5-7 + 2305843009213693951L, 2049638230412172401L, 1844674407370955161L, // 8-10 + 1676976733973595601L, 1537228672809129301L, 1418980313362273201L, // 11-13 + 1317624576693539401L, 1229782938247303441L, 1152921504606846975L, // 14-16 + 1085102592571150095L, 1024819115206086200L, 970881267037344821L, // 17-19 + 922337203685477580L, 878416384462359600L, 838488366986797800L, // 20-22 + 802032351030850070L, 768614336404564650L, 737869762948382064L, // 23-25 + 709490156681136600L, 683212743470724133L, 658812288346769700L, // 26-28 + 636094623231363848L, 614891469123651720L, 595056260442243600L, // 29-31 + 576460752303423487L, 558992244657865200L, 542551296285575047L, // 32-34 + 527049830677415760L, 512409557603043100L }; // 35-36 + + // calculated as 0xffffffffffffffff % radix + private 
static final int maxValueMods[] = {0, 0, // 0 and 1 are invalid + 1, 0, 3, 0, 3, 1, 7, 6, 5, 4, 3, 2, 1, 0, 15, 0, 15, 16, 15, 15, // 2-21 + 15, 5, 15, 15, 15, 24, 15, 23, 15, 15, 31, 15, 17, 15, 15 }; // 22-36 + + /** + * Return the four cells that are adjacent across the cell's four edges. + * Neighbors are returned in the order defined by S2Cell::GetEdge. All + * neighbors are guaranteed to be distinct. + */ + public void getEdgeNeighbors(S2CellId neighbors[]) { + + MutableInteger i = new MutableInteger(0); + MutableInteger j = new MutableInteger(0); + + int level = this.level(); + int size = 1 << (MAX_LEVEL - level); + int face = toFaceIJOrientation(i, j, null); + + // Edges 0, 1, 2, 3 are in the S, E, N, W directions. + neighbors[0] = fromFaceIJSame(face, i.intValue(), j.intValue() - size, + j.intValue() - size >= 0).parent(level); + neighbors[1] = fromFaceIJSame(face, i.intValue() + size, j.intValue(), + i.intValue() + size < MAX_SIZE).parent(level); + neighbors[2] = fromFaceIJSame(face, i.intValue(), j.intValue() + size, + j.intValue() + size < MAX_SIZE).parent(level); + neighbors[3] = fromFaceIJSame(face, i.intValue() - size, j.intValue(), + i.intValue() - size >= 0).parent(level); + } + + /** + * Return the neighbors of closest vertex to this cell at the given level, by + * appending them to "output". Normally there are four neighbors, but the + * closest vertex may only have three neighbors if it is one of the 8 cube + * vertices. + * + * Requires: level < this.evel(), so that we can determine which vertex is + * closest (in particular, level == MAX_LEVEL is not allowed). + */ + public void getVertexNeighbors(int level, List<S2CellId> output) { + // "level" must be strictly less than this cell's level so that we can + // determine which vertex this cell is closest to. 
+ // assert (level < this.level()); + MutableInteger i = new MutableInteger(0); + MutableInteger j = new MutableInteger(0); + int face = toFaceIJOrientation(i, j, null); + + // Determine the i- and j-offsets to the closest neighboring cell in each + // direction. This involves looking at the next bit of "i" and "j" to + // determine which quadrant of this->parent(level) this cell lies in. + int halfsize = 1 << (MAX_LEVEL - (level + 1)); + int size = halfsize << 1; + boolean isame, jsame; + int ioffset, joffset; + if ((i.intValue() & halfsize) != 0) { + ioffset = size; + isame = (i.intValue() + size) < MAX_SIZE; + } else { + ioffset = -size; + isame = (i.intValue() - size) >= 0; + } + if ((j.intValue() & halfsize) != 0) { + joffset = size; + jsame = (j.intValue() + size) < MAX_SIZE; + } else { + joffset = -size; + jsame = (j.intValue() - size) >= 0; + } + + output.add(parent(level)); + output + .add(fromFaceIJSame(face, i.intValue() + ioffset, j.intValue(), isame) + .parent(level)); + output + .add(fromFaceIJSame(face, i.intValue(), j.intValue() + joffset, jsame) + .parent(level)); + // If i- and j- edge neighbors are *both* on a different face, then this + // vertex only has three neighbors (it is one of the 8 cube vertices). + if (isame || jsame) { + output.add(fromFaceIJSame(face, i.intValue() + ioffset, + j.intValue() + joffset, isame && jsame).parent(level)); + } + } + + /** + * Append all neighbors of this cell at the given level to "output". Two cells + * X and Y are neighbors if their boundaries intersect but their interiors do + * not. In particular, two cells that intersect at a single point are + * neighbors. + * + * Requires: nbr_level >= this->level(). Note that for cells adjacent to a + * face vertex, the same neighbor may be appended more than once. 
+ */ + public void getAllNeighbors(int nbrLevel, List<S2CellId> output) { + MutableInteger i = new MutableInteger(0); + MutableInteger j = new MutableInteger(0); + + int face = toFaceIJOrientation(i, j, null); + + // Find the coordinates of the lower left-hand leaf cell. We need to + // normalize (i,j) to a known position within the cell because nbr_level + // may be larger than this cell's level. + int size = 1 << (MAX_LEVEL - level()); + i.setValue(i.intValue() & -size); + j.setValue(j.intValue() & -size); + + int nbrSize = 1 << (MAX_LEVEL - nbrLevel); + // assert (nbrSize <= size); + + // We compute the N-S, E-W, and diagonal neighbors in one pass. + // The loop test is at the end of the loop to avoid 32-bit overflow. + for (int k = -nbrSize;; k += nbrSize) { + boolean sameFace; + if (k < 0) { + sameFace = (j.intValue() + k >= 0); + } else if (k >= size) { + sameFace = (j.intValue() + k < MAX_SIZE); + } else { + sameFace = true; + // North and South neighbors. + output.add(fromFaceIJSame(face, i.intValue() + k, + j.intValue() - nbrSize, j.intValue() - size >= 0).parent(nbrLevel)); + output.add(fromFaceIJSame(face, i.intValue() + k, j.intValue() + size, + j.intValue() + size < MAX_SIZE).parent(nbrLevel)); + } + // East, West, and Diagonal neighbors. + output.add(fromFaceIJSame(face, i.intValue() - nbrSize, + j.intValue() + k, sameFace && i.intValue() - size >= 0).parent( + nbrLevel)); + output.add(fromFaceIJSame(face, i.intValue() + size, j.intValue() + k, + sameFace && i.intValue() + size < MAX_SIZE).parent(nbrLevel)); + if (k >= size) { + break; + } + } + } + + // /////////////////////////////////////////////////////////////////// + // Low-level methods. + + /** + * Return a leaf cell given its cube face (range 0..5) and i- and + * j-coordinates (see s2.h). + */ + public static S2CellId fromFaceIJ(int face, int i, int j) { + // Optimization notes: + // - Non-overlapping bit fields can be combined with either "+" or "|". 
+ // Generally "+" seems to produce better code, but not always. + + // gcc doesn't have very good code generation for 64-bit operations. + // We optimize this by computing the result as two 32-bit integers + // and combining them at the end. Declaring the result as an array + // rather than local variables helps the compiler to do a better job + // of register allocation as well. Note that the two 32-bits halves + // get shifted one bit to the left when they are combined. + long n[] = {0, face << (POS_BITS - 33)}; + + // Alternating faces have opposite Hilbert curve orientations; this + // is necessary in order for all faces to have a right-handed + // coordinate system. + int bits = (face & SWAP_MASK); + + // Each iteration maps 4 bits of "i" and "j" into 8 bits of the Hilbert + // curve position. The lookup table transforms a 10-bit key of the form + // "iiiijjjjoo" to a 10-bit value of the form "ppppppppoo", where the + // letters [ijpo] denote bits of "i", "j", Hilbert curve position, and + // Hilbert curve orientation respectively. + + for (int k = 7; k >= 0; --k) { + bits = getBits(n, i, j, k, bits); + } + + S2CellId s = new S2CellId((((n[1] << 32) + n[0]) << 1) + 1); + return s; + } + + private static int getBits(long[] n, int i, int j, int k, int bits) { + final int mask = (1 << LOOKUP_BITS) - 1; + bits += (((i >> (k * LOOKUP_BITS)) & mask) << (LOOKUP_BITS + 2)); + bits += (((j >> (k * LOOKUP_BITS)) & mask) << 2); + bits = LOOKUP_POS[bits]; + n[k >> 2] |= ((((long) bits) >> 2) << ((k & 3) * 2 * LOOKUP_BITS)); + bits &= (SWAP_MASK | INVERT_MASK); + return bits; + } + + + /** + * Return the (face, i, j) coordinates for the leaf cell corresponding to this + * cell id. Since cells are represented by the Hilbert curve position at the + * center of the cell, the returned (i,j) for non-leaf cells will be a leaf + * cell adjacent to the cell center. If "orientation" is non-NULL, also return + * the Hilbert curve orientation for the current cell. 
+ */ + public int toFaceIJOrientation(MutableInteger pi, MutableInteger pj, + MutableInteger orientation) { + // System.out.println("Entering toFaceIjorientation"); + int face = this.face(); + int bits = (face & SWAP_MASK); + + // System.out.println("face = " + face + " bits = " + bits); + + // Each iteration maps 8 bits of the Hilbert curve position into + // 4 bits of "i" and "j". The lookup table transforms a key of the + // form "ppppppppoo" to a value of the form "iiiijjjjoo", where the + // letters [ijpo] represents bits of "i", "j", the Hilbert curve + // position, and the Hilbert curve orientation respectively. + // + // On the first iteration we need to be careful to clear out the bits + // representing the cube face. + for (int k = 7; k >= 0; --k) { + bits = getBits1(pi, pj, k, bits); + // System.out.println("pi = " + pi + " pj= " + pj + " bits = " + bits); + } + + if (orientation != null) { + // The position of a non-leaf cell at level "n" consists of a prefix of + // 2*n bits that identifies the cell, followed by a suffix of + // 2*(MAX_LEVEL-n)+1 bits of the form 10*. If n==MAX_LEVEL, the suffix is + // just "1" and has no effect. Otherwise, it consists of "10", followed + // by (MAX_LEVEL-n-1) repetitions of "00", followed by "0". The "10" has + // no effect, while each occurrence of "00" has the effect of reversing + // the kSwapMask bit. + // assert (S2.POS_TO_ORIENTATION[2] == 0); + // assert (S2.POS_TO_ORIENTATION[0] == S2.SWAP_MASK); + if ((lowestOnBit() & 0x1111111111111110L) != 0) { + bits ^= S2.SWAP_MASK; + } + orientation.setValue(bits); + } + return face; + } + + private int getBits1(MutableInteger i, MutableInteger j, int k, int bits) { + final int nbits = (k == 7) ? 
(MAX_LEVEL - 7 * LOOKUP_BITS) : LOOKUP_BITS; + + bits += (((int) (id >>> (k * 2 * LOOKUP_BITS + 1)) & + ((1 << (2 * nbits)) - 1))) << 2; + /* + * System.out.println("id is: " + id_); System.out.println("bits is " + + * bits); System.out.println("lookup_ij[bits] is " + lookup_ij[bits]); + */ + bits = LOOKUP_IJ[bits]; + i.setValue(i.intValue() + + ((bits >> (LOOKUP_BITS + 2)) << (k * LOOKUP_BITS))); + /* + * System.out.println("left is " + ((bits >> 2) & ((1 << kLookupBits) - + * 1))); System.out.println("right is " + (k * kLookupBits)); + * System.out.println("j is: " + j.intValue()); System.out.println("addition + * is: " + ((((bits >> 2) & ((1 << kLookupBits) - 1))) << (k * + * kLookupBits))); + */ + j.setValue(j.intValue() + + ((((bits >> 2) & ((1 << LOOKUP_BITS) - 1))) << (k * LOOKUP_BITS))); + bits &= (SWAP_MASK | INVERT_MASK); + return bits; + } + + /** Return the lowest-numbered bit that is on for cells at the given level. */ + public long lowestOnBit() { + return id & -id; + } + + /** + * Return the lowest-numbered bit that is on for this cell id, which is equal + * to (uint64(1) << (2 * (MAX_LEVEL - level))). So for example, a.lsb() <= + * b.lsb() if and only if a.level() >= b.level(), but the first test is more + * efficient. + */ + public static long lowestOnBitForLevel(int level) { + return 1L << (2 * (MAX_LEVEL - level)); + } + + + /** + * Return the i- or j-index of the leaf cell containing the given s- or + * t-value. + */ + private static int stToIJ(double s) { + // Converting from floating-point to integers via static_cast is very slow + // on Intel processors because it requires changing the rounding mode. + // Rounding to the nearest integer using FastIntRound() is much faster. + + final int m = MAX_SIZE / 2; // scaling multiplier + return (int) Math + .max(0, Math.min(2 * m - 1, Math.round(m * s + (m - 0.5)))); + } + + /** + * Convert (face, si, ti) coordinates (see s2.h) to a direction vector (not + * necessarily unit length). 
+ */ + private static S2Point faceSiTiToXYZ(int face, int si, int ti) { + final double kScale = 1.0 / MAX_SIZE; + double u = S2Projections.stToUV(kScale * si); + double v = S2Projections.stToUV(kScale * ti); + return S2Projections.faceUvToXyz(face, u, v); + } + + /** + * Given (i, j) coordinates that may be out of bounds, normalize them by + * returning the corresponding neighbor cell on an adjacent face. + */ + private static S2CellId fromFaceIJWrap(int face, int i, int j) { + // Convert i and j to the coordinates of a leaf cell just beyond the + // boundary of this face. This prevents 32-bit overflow in the case + // of finding the neighbors of a face cell, and also means that we + // don't need to worry about the distinction between (s,t) and (u,v). + i = Math.max(-1, Math.min(MAX_SIZE, i)); + j = Math.max(-1, Math.min(MAX_SIZE, j)); + + // Find the (s,t) coordinates corresponding to (i,j). At least one + // of these coordinates will be just outside the range [0, 1]. + final double kScale = 1.0 / MAX_SIZE; + double s = kScale * ((i << 1) + 1 - MAX_SIZE); + double t = kScale * ((j << 1) + 1 - MAX_SIZE); + + // Find the leaf cell coordinates on the adjacent face, and convert + // them to a cell id at the appropriate level. + R2Vector st = new R2Vector(); + face = S2Projections.xyzToFaceUV(S2Projections.faceUvToXyz(face, s, t), st); + return fromFaceIJ(face, stToIJ(st.x), stToIJ(st.y)); + } + + /** + * Public helper function that calls FromFaceIJ if sameFace is true, or + * FromFaceIJWrap if sameFace is false. 
+ */ + public static S2CellId fromFaceIJSame(int face, int i, int j, + boolean sameFace) { + if (sameFace) { + return S2CellId.fromFaceIJ(face, i, j); + } else { + return S2CellId.fromFaceIJWrap(face, i, j); + } + } + + @Override + public boolean equals(Object that) { + if (!(that instanceof S2CellId)) { + return false; + } + S2CellId x = (S2CellId) that; + return id() == x.id(); + } + + /** + * Returns true if x1 < x2, when both values are treated as unsigned. + */ + public static boolean unsignedLongLessThan(long x1, long x2) { + return (x1 + Long.MIN_VALUE) < (x2 + Long.MIN_VALUE); + } + + /** + * Returns true if x1 > x2, when both values are treated as unsigned. + */ + public static boolean unsignedLongGreaterThan(long x1, long x2) { + return (x1 + Long.MIN_VALUE) > (x2 + Long.MIN_VALUE); + } + + public boolean lessThan(S2CellId x) { + return unsignedLongLessThan(id, x.id); + } + + public boolean greaterThan(S2CellId x) { + return unsignedLongGreaterThan(id, x.id); + } + + public boolean lessOrEquals(S2CellId x) { + return unsignedLongLessThan(id, x.id) || id == x.id; + } + + public boolean greaterOrEquals(S2CellId x) { + return unsignedLongGreaterThan(id, x.id) || id == x.id; + } + + @Override + public int hashCode() { + return (int) ((id >>> 32) + id); + } + + + @Override + public String toString() { + return "(face=" + face() + ", pos=" + Long.toHexString(pos()) + ", level=" + + level() + ")"; + } + + private static void initLookupCell(int level, int i, int j, + int origOrientation, int pos, int orientation) { + if (level == LOOKUP_BITS) { + int ij = (i << LOOKUP_BITS) + j; + LOOKUP_POS[(ij << 2) + origOrientation] = (pos << 2) + orientation; + LOOKUP_IJ[(pos << 2) + origOrientation] = (ij << 2) + orientation; + } else { + level++; + i <<= 1; + j <<= 1; + pos <<= 2; + int[] r = S2.POS_TO_IJ[orientation]; + initLookupCell(level, i + (r[0] >>> 1), j + (r[0] & 1), origOrientation, + pos, orientation ^ S2.POS_TO_ORIENTATION[0]); + initLookupCell(level, i + (r[1] 
>>> 1), j + (r[1] & 1), origOrientation, + pos + 1, orientation ^ S2.POS_TO_ORIENTATION[1]); + initLookupCell(level, i + (r[2] >>> 1), j + (r[2] & 1), origOrientation, + pos + 2, orientation ^ S2.POS_TO_ORIENTATION[2]); + initLookupCell(level, i + (r[3] >>> 1), j + (r[3] & 1), origOrientation, + pos + 3, orientation ^ S2.POS_TO_ORIENTATION[3]); + } + } + + @Override + public int compareTo(S2CellId that) { + return unsignedLongLessThan(this.id, that.id) ? -1 : + unsignedLongGreaterThan(this.id, that.id) ? 1 : 0; + } + +} diff --git a/src/com/google/common/geometry/S2CellUnion.java b/src/com/google/common/geometry/S2CellUnion.java new file mode 100644 index 0000000..5666de8 --- /dev/null +++ b/src/com/google/common/geometry/S2CellUnion.java @@ -0,0 +1,619 @@ +/* + * Copyright 2005 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.common.geometry; + +import com.google.common.collect.Lists; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; + +/** + * An S2CellUnion is a region consisting of cells of various sizes. Typically a + * cell union is used to approximate some other shape. There is a tradeoff + * between the accuracy of the approximation and how many cells are used. Unlike + * polygons, cells have a fixed hierarchical structure. This makes them more + * suitable for optimizations based on preprocessing. 
+ * + */ +public strictfp class S2CellUnion implements S2Region, Iterable<S2CellId> { + + /** The CellIds that form the Union */ + private ArrayList<S2CellId> cellIds = new ArrayList<S2CellId>(); + + public S2CellUnion() { + } + + public void initFromCellIds(ArrayList<S2CellId> cellIds) { + initRawCellIds(cellIds); + normalize(); + } + + /** + * Populates a cell union with the given S2CellIds or 64-bit cells ids, and + * then calls Normalize(). The InitSwap() version takes ownership of the + * vector data without copying and clears the given vector. These methods may + * be called multiple times. + */ + public void initFromIds(ArrayList<Long> cellIds) { + initRawIds(cellIds); + normalize(); + } + + public void initSwap(ArrayList<S2CellId> cellIds) { + initRawSwap(cellIds); + normalize(); + } + + public void initRawCellIds(ArrayList<S2CellId> cellIds) { + this.cellIds = cellIds; + } + + public void initRawIds(ArrayList<Long> cellIds) { + int size = cellIds.size(); + this.cellIds = new ArrayList<S2CellId>(size); + for (Long id : cellIds) { + this.cellIds.add(new S2CellId(id)); + } + } + + /** + * Like Init(), but does not call Normalize(). The cell union *must* be + * normalized before doing any calculations with it, so it is the caller's + * responsibility to make sure that the input is normalized. This method is + * useful when converting cell unions to another representation and back. + * These methods may be called multiple times. + */ + public void initRawSwap(ArrayList<S2CellId> cellIds) { + this.cellIds = new ArrayList<S2CellId>(cellIds); + cellIds.clear(); + } + + public int size() { + return cellIds.size(); + } + + /** Convenience methods for accessing the individual cell ids. */ + public S2CellId cellId(int i) { + return cellIds.get(i); + } + + /** Enable iteration over the union's cells. */ + @Override + public Iterator<S2CellId> iterator() { + return cellIds.iterator(); + } + + /** Direct access to the underlying vector for iteration . 
*/ + public ArrayList<S2CellId> cellIds() { + return cellIds; + } + + /** + * Replaces "output" with an expanded version of the cell union where any + * cells whose level is less than "min_level" or where (level - min_level) is + * not a multiple of "level_mod" are replaced by their children, until either + * both of these conditions are satisfied or the maximum level is reached. + * + * This method allows a covering generated by S2RegionCoverer using + * min_level() or level_mod() constraints to be stored as a normalized cell + * union (which allows various geometric computations to be done) and then + * converted back to the original list of cell ids that satisfies the desired + * constraints. + */ + public void denormalize(int minLevel, int levelMod, ArrayList<S2CellId> output) { + // assert (minLevel >= 0 && minLevel <= S2CellId.MAX_LEVEL); + // assert (levelMod >= 1 && levelMod <= 3); + + output.clear(); + output.ensureCapacity(size()); + for (S2CellId id : this) { + int level = id.level(); + int newLevel = Math.max(minLevel, level); + if (levelMod > 1) { + // Round up so that (new_level - min_level) is a multiple of level_mod. + // (Note that S2CellId::kMaxLevel is a multiple of 1, 2, and 3.) + newLevel += (S2CellId.MAX_LEVEL - (newLevel - minLevel)) % levelMod; + newLevel = Math.min(S2CellId.MAX_LEVEL, newLevel); + } + if (newLevel == level) { + output.add(id); + } else { + S2CellId end = id.childEnd(newLevel); + for (id = id.childBegin(newLevel); !id.equals(end); id = id.next()) { + output.add(id); + } + } + } + } + + /** + * If there are more than "excess" elements of the cell_ids() vector that are + * allocated but unused, reallocate the array to eliminate the excess space. + * This reduces memory usage when many cell unions need to be held in memory + * at once. + */ + public void pack() { + cellIds.trimToSize(); + } + + + /** + * Return true if the cell union contains the given cell id. Containment is + * defined with respect to regions, e.g. 
a cell contains its 4 children. This + * is a fast operation (logarithmic in the size of the cell union). + */ + public boolean contains(S2CellId id) { + // This function requires that Normalize has been called first. + // + // This is an exact test. Each cell occupies a linear span of the S2 + // space-filling curve, and the cell id is simply the position at the center + // of this span. The cell union ids are sorted in increasing order along + // the space-filling curve. So we simply find the pair of cell ids that + // surround the given cell id (using binary search). There is containment + // if and only if one of these two cell ids contains this cell. + + int pos = Collections.binarySearch(cellIds, id); + if (pos < 0) { + pos = -pos - 1; + } + if (pos < cellIds.size() && cellIds.get(pos).rangeMin().lessOrEquals(id)) { + return true; + } + return pos != 0 && cellIds.get(pos - 1).rangeMax().greaterOrEquals(id); + } + + /** + * Return true if the cell union intersects the given cell id. This is a fast + * operation (logarithmic in the size of the cell union). + */ + public boolean intersects(S2CellId id) { + // This function requires that Normalize has been called first. + // This is an exact test; see the comments for Contains() above. + int pos = Collections.binarySearch(cellIds, id); + + if (pos < 0) { + pos = -pos - 1; + } + + + if (pos < cellIds.size() && cellIds.get(pos).rangeMin().lessOrEquals(id.rangeMax())) { + return true; + } + return pos != 0 && cellIds.get(pos - 1).rangeMax().greaterOrEquals(id.rangeMin()); + } + + public boolean contains(S2CellUnion that) { + // TODO(kirilll?): A divide-and-conquer or alternating-skip-search approach + // may be significantly faster in both the average and worst case. + for (S2CellId id : that) { + if (!this.contains(id)) { + return false; + } + } + return true; + } + + /** This is a fast operation (logarithmic in the size of the cell union). 
*/ + @Override + public boolean contains(S2Cell cell) { + return contains(cell.id()); + } + + /** + * Return true if this cell union contain/intersects the given other cell + * union. + */ + public boolean intersects(S2CellUnion union) { + // TODO(kirilll?): A divide-and-conquer or alternating-skip-search approach + // may be significantly faster in both the average and worst case. + for (S2CellId id : union) { + if (intersects(id)) { + return true; + } + } + return false; + } + + public void getUnion(S2CellUnion x, S2CellUnion y) { + // assert (x != this && y != this); + cellIds.clear(); + cellIds.ensureCapacity(x.size() + y.size()); + cellIds.addAll(x.cellIds); + cellIds.addAll(y.cellIds); + normalize(); + } + + /** + * Specialized version of GetIntersection() that gets the intersection of a + * cell union with the given cell id. This can be useful for "splitting" a + * cell union into chunks. + */ + public void getIntersection(S2CellUnion x, S2CellId id) { + // assert (x != this); + cellIds.clear(); + if (x.contains(id)) { + cellIds.add(id); + } else { + int pos = Collections.binarySearch(x.cellIds, id.rangeMin()); + + if (pos < 0) { + pos = -pos - 1; + } + + S2CellId idmax = id.rangeMax(); + int size = x.cellIds.size(); + while (pos < size && x.cellIds.get(pos).lessOrEquals(idmax)) { + cellIds.add(x.cellIds.get(pos++)); + } + } + } + + /** + * Initialize this cell union to the union or intersection of the two given + * cell unions. Requires: x != this and y != this. + */ + public void getIntersection(S2CellUnion x, S2CellUnion y) { + // assert (x != this && y != this); + + // This is a fairly efficient calculation that uses binary search to skip + // over sections of both input vectors. It takes constant time if all the + // cells of "x" come before or after all the cells of "y" in S2CellId order. 
+ + cellIds.clear(); + + int i = 0; + int j = 0; + + while (i < x.cellIds.size() && j < y.cellIds.size()) { + S2CellId imin = x.cellId(i).rangeMin(); + S2CellId jmin = y.cellId(j).rangeMin(); + if (imin.greaterThan(jmin)) { + // Either j->contains(*i) or the two cells are disjoint. + if (x.cellId(i).lessOrEquals(y.cellId(j).rangeMax())) { + cellIds.add(x.cellId(i++)); + } else { + // Advance "j" to the first cell possibly contained by *i. + j = indexedBinarySearch(y.cellIds, imin, j + 1); + // The previous cell *(j-1) may now contain *i. + if (x.cellId(i).lessOrEquals(y.cellId(j - 1).rangeMax())) { + --j; + } + } + } else if (jmin.greaterThan(imin)) { + // Identical to the code above with "i" and "j" reversed. + if (y.cellId(j).lessOrEquals(x.cellId(i).rangeMax())) { + cellIds.add(y.cellId(j++)); + } else { + i = indexedBinarySearch(x.cellIds, jmin, i + 1); + if (y.cellId(j).lessOrEquals(x.cellId(i - 1).rangeMax())) { + --i; + } + } + } else { + // "i" and "j" have the same range_min(), so one contains the other. + if (x.cellId(i).lessThan(y.cellId(j))) { + cellIds.add(x.cellId(i++)); + } else { + cellIds.add(y.cellId(j++)); + } + } + } + // The output is generated in sorted order, and there should not be any + // cells that can be merged (provided that both inputs were normalized). + // assert (!normalize()); + } + + /** + * Just as normal binary search, except that it allows specifying the starting + * value for the lower bound. + * + * @return The position of the searched element in the list (if found), or the + * position where the element could be inserted without violating the + * order. 
+ */ + private int indexedBinarySearch(List<S2CellId> l, S2CellId key, int low) { + int high = l.size() - 1; + + while (low <= high) { + int mid = (low + high) >> 1; + S2CellId midVal = l.get(mid); + int cmp = midVal.compareTo(key); + + if (cmp < 0) { + low = mid + 1; + } else if (cmp > 0) { + high = mid - 1; + } else { + return mid; // key found + } + } + return low; // key not found + } + + /** + * Expands the cell union such that it contains all cells of the given level + * that are adjacent to any cell of the original union. Two cells are defined + * as adjacent if their boundaries have any points in common, i.e. most cells + * have 8 adjacent cells (not counting the cell itself). + * + * Note that the size of the output is exponential in "level". For example, + * if level == 20 and the input has a cell at level 10, there will be on the + * order of 4000 adjacent cells in the output. For most applications the + * Expand(min_fraction, min_distance) method below is easier to use. + */ + public void expand(int level) { + ArrayList<S2CellId> output = new ArrayList<S2CellId>(); + long levelLsb = S2CellId.lowestOnBitForLevel(level); + int i = size() - 1; + do { + S2CellId id = cellId(i); + if (id.lowestOnBit() < levelLsb) { + id = id.parent(level); + // Optimization: skip over any cells contained by this one. This is + // especially important when very small regions are being expanded. + while (i > 0 && id.contains(cellId(i - 1))) { + --i; + } + } + output.add(id); + id.getAllNeighbors(level, output); + } while (--i >= 0); + initSwap(output); + } + + /** + * Expand the cell union such that it contains all points whose distance to + * the cell union is at most minRadius, but do not use cells that are more + * than maxLevelDiff levels higher than the largest cell in the input. The + * second parameter controls the tradeoff between accuracy and output size + * when a large region is being expanded by a small amount (e.g. expanding + * Canada by 1km). 
+ * + * For example, if maxLevelDiff == 4, the region will always be expanded by + * approximately 1/16 the width of its largest cell. Note that in the worst + * case, the number of cells in the output can be up to 4 * (1 + 2 ** + * maxLevelDiff) times larger than the number of cells in the input. + */ + public void expand(S1Angle minRadius, int maxLevelDiff) { + int minLevel = S2CellId.MAX_LEVEL; + for (S2CellId id : this) { + minLevel = Math.min(minLevel, id.level()); + } + // Find the maximum level such that all cells are at least "min_radius" + // wide. + int radiusLevel = S2Projections.MIN_WIDTH.getMaxLevel(minRadius.radians()); + if (radiusLevel == 0 && minRadius.radians() > S2Projections.MIN_WIDTH.getValue(0)) { + // The requested expansion is greater than the width of a face cell. + // The easiest way to handle this is to expand twice. + expand(0); + } + expand(Math.min(minLevel + maxLevelDiff, radiusLevel)); + } + + @Override + public S2Region clone() { + S2CellUnion copy = new S2CellUnion(); + copy.initRawCellIds(Lists.newArrayList(cellIds)); + return copy; + } + + @Override + public S2Cap getCapBound() { + // Compute the approximate centroid of the region. This won't produce the + // bounding cap of minimal area, but it should be close enough. + if (cellIds.isEmpty()) { + return S2Cap.empty(); + } + S2Point centroid = new S2Point(0, 0, 0); + for (S2CellId id : this) { + double area = S2Cell.averageArea(id.level()); + centroid = S2Point.add(centroid, S2Point.mul(id.toPoint(), area)); + } + if (centroid.equals(new S2Point(0, 0, 0))) { + centroid = new S2Point(1, 0, 0); + } else { + centroid = S2Point.normalize(centroid); + } + + // Use the centroid as the cap axis, and expand the cap angle so that it + // contains the bounding caps of all the individual cells. Note that it is + // *not* sufficient to just bound all the cell vertices because the bounding + // cap may be concave (i.e. cover more than one hemisphere). 
+ S2Cap cap = S2Cap.fromAxisHeight(centroid, 0); + for (S2CellId id : this) { + cap = cap.addCap(new S2Cell(id).getCapBound()); + } + return cap; + } + + @Override + public S2LatLngRect getRectBound() { + S2LatLngRect bound = S2LatLngRect.empty(); + for (S2CellId id : this) { + bound = bound.union(new S2Cell(id).getRectBound()); + } + return bound; + } + + + /** This is a fast operation (logarithmic in the size of the cell union). */ + @Override + public boolean mayIntersect(S2Cell cell) { + return intersects(cell.id()); + } + + /** + * The point 'p' does not need to be normalized. This is a fast operation + * (logarithmic in the size of the cell union). + */ + public boolean contains(S2Point p) { + return contains(S2CellId.fromPoint(p)); + + } + + /** + * The number of leaf cells covered by the union. + * This will be no more than 6*2^60 for the whole sphere. + * + * @return the number of leaf cells covered by the union + */ + public long leafCellsCovered() { + long numLeaves = 0; + for (S2CellId cellId : cellIds) { + int invertedLevel = S2CellId.MAX_LEVEL - cellId.level(); + numLeaves += (1L << (invertedLevel << 1)); + } + return numLeaves; + } + + + /** + * Approximate this cell union's area by summing the average area of + * each contained cell's average area, using {@link S2Cell#averageArea()}. + * This is equivalent to the number of leaves covered, multiplied by + * the average area of a leaf. + * Note that {@link S2Cell#averageArea()} does not take into account + * distortion of cell, and thus may be off by up to a factor of 1.7. + * NOTE: Since this is proportional to LeafCellsCovered(), it is + * always better to use the other function if all you care about is + * the relative average area between objects. 
+ * + * @return the sum of the average area of each contained cell's average area + */ + public double averageBasedArea() { + return S2Cell.averageArea(S2CellId.MAX_LEVEL) * leafCellsCovered(); + } + + /** + * Calculates this cell union's area by summing the approximate area for each + * contained cell, using {@link S2Cell#approxArea()}. + * + * @return approximate area of the cell union + */ + public double approxArea() { + double area = 0; + for (S2CellId cellId : cellIds) { + area += new S2Cell(cellId).approxArea(); + } + return area; + } + + /** + * Calculates this cell union's area by summing the exact area for each + * contained cell, using the {@link S2Cell#exactArea()}. + * + * @return the exact area of the cell union + */ + public double exactArea() { + double area = 0; + for (S2CellId cellId : cellIds) { + area += new S2Cell(cellId).exactArea(); + } + return area; + } + + /** Return true if two cell unions are identical. */ + @Override + public boolean equals(Object that) { + if (!(that instanceof S2CellUnion)) { + return false; + } + S2CellUnion union = (S2CellUnion) that; + return this.cellIds.equals(union.cellIds); + } + + @Override + public int hashCode() { + int value = 17; + for (S2CellId id : this) { + value = 37 * value + id.hashCode(); + } + return value; + } + + /** + * Normalizes the cell union by discarding cells that are contained by other + * cells, replacing groups of 4 child cells by their parent cell whenever + * possible, and sorting all the cell ids in increasing order. Returns true if + * the number of cells was reduced. + * + * This method *must* be called before doing any calculations on the cell + * union, such as Intersects() or Contains(). + * + * @return true if the normalize operation had any effect on the cell union, + * false if the union was already normalized + */ + public boolean normalize() { + // Optimize the representation by looking for cases where all subcells + // of a parent cell are present. 
    // Build the normalized result into "output"; presized to the current size
    // since normalization can only shrink the union.
    ArrayList<S2CellId> output = new ArrayList<S2CellId>(cellIds.size());
    // NOTE(review): redundant — the constructor above already reserves this
    // capacity.
    output.ensureCapacity(cellIds.size());
    // Sorting puts sibling cells next to each other, which the XOR/mask
    // collapse test below relies on.
    Collections.sort(cellIds);

    for (S2CellId id : this) {
      int size = output.size();
      // Check whether this cell is contained by the previous cell.
      if (!output.isEmpty() && output.get(size - 1).contains(id)) {
        continue;
      }

      // Discard any previous cells contained by this cell.
      while (!output.isEmpty() && id.contains(output.get(output.size() - 1))) {
        output.remove(output.size() - 1);
      }

      // Check whether the last 3 elements of "output" plus "id" can be
      // collapsed into a single parent cell.
      while (output.size() >= 3) {
        size = output.size();
        // A necessary (but not sufficient) condition is that the XOR of the
        // four cells must be zero. This is also very fast to test.
        if ((output.get(size - 3).id() ^ output.get(size - 2).id() ^ output.get(size - 1).id())
            != id.id()) {
          break;
        }

        // Now we do a slightly more expensive but exact test. First, compute a
        // mask that blocks out the two bits that encode the child position of
        // "id" with respect to its parent, then check that the other three
        // children all agree with "mask". A face cell (id.isFace()) has no
        // parent, so it can never be collapsed.
        long mask = id.lowestOnBit() << 1;
        mask = ~(mask + (mask << 1));
        long idMasked = (id.id() & mask);
        if ((output.get(size - 3).id() & mask) != idMasked
            || (output.get(size - 2).id() & mask) != idMasked
            || (output.get(size - 1).id() & mask) != idMasked || id.isFace()) {
          break;
        }

        // Replace four children by their parent cell.
+ output.remove(size - 1); + output.remove(size - 2); + output.remove(size - 3); + id = id.parent(); + } + output.add(id); + } + if (output.size() < size()) { + initRawSwap(output); + return true; + } + return false; + } +} diff --git a/src/com/google/common/geometry/S2Edge.java b/src/com/google/common/geometry/S2Edge.java new file mode 100644 index 0000000..35cf5c2 --- /dev/null +++ b/src/com/google/common/geometry/S2Edge.java @@ -0,0 +1,61 @@ +/* + * Copyright 2011 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.common.geometry; + +/** + * An abstract directed edge from one S2Point to another S2Point. 
+ * + * @author kirilll@google.com (Kirill Levin) + */ +public class S2Edge { + + private final S2Point start; + private final S2Point end; + + public S2Edge(S2Point start, S2Point end) { + this.start = start; + this.end = end; + } + + public S2Point getStart() { + return start; + } + + public S2Point getEnd() { + return end; + } + + @Override + public String toString() { + return "Edge: (" + start.toDegreesString() + " -> " + end.toDegreesString() + ")\n" + " or [" + + start + " -> " + end + "]"; + } + + @Override + public int hashCode() { + return getStart().hashCode() - getEnd().hashCode(); + } + + @Override + public boolean equals(Object o) { + if (o == null || !(o instanceof S2Edge)) { + return false; + } + S2Edge other = (S2Edge) o; + return getStart().equals(other.getStart()) && getEnd().equals(other.getEnd()); + } +} diff --git a/src/com/google/common/geometry/S2EdgeIndex.java b/src/com/google/common/geometry/S2EdgeIndex.java new file mode 100644 index 0000000..43b41b8 --- /dev/null +++ b/src/com/google/common/geometry/S2EdgeIndex.java @@ -0,0 +1,612 @@ +/* + * Copyright 2006 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.common.geometry; + +import com.google.common.base.Preconditions; +import com.google.common.collect.Lists; +import com.google.common.collect.Multimap; +import com.google.common.collect.Sets; +import com.google.common.collect.TreeMultimap; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.SortedMap; + +public abstract strictfp class S2EdgeIndex { + + /** + * Thicken the edge in all directions by roughly 1% of the edge length when + * thickenEdge is true. + */ + private static final double THICKENING = 0.01; + + /** + * Threshold for small angles, that help lenientCrossing to determine whether + * two edges are likely to intersect. + */ + private static final double MAX_DET_ERROR = 1e-14; + + /** + * When we test a query edge against a cell, we don't recurse if there are + * only a few test edges in it. For testing, it is useful to always recurse to + * the end. NOTE: You don't want to set this to true anywhere but in tests. + */ + private static boolean alwaysRecurseOnChildren = false; + + /** + * Maps cell ids to covered edges; has the property that the set of all cell + * ids mapping to a particular edge forms a covering of that edge. + */ + private TreeMultimap<S2CellId, Integer> mapping = TreeMultimap.create(); + + /** + * No cell strictly below this level appears in mapping. Initially leaf level, + * that's the minimum level at which we will ever look for test edges. + */ + private int minimumS2LevelUsed; + + /** + * Has the index been computed already? + */ + private boolean indexComputed; + + /** + * Number of queries so far + */ + private int queryCount; + + /** + * Empties the index in case it already contained something. + */ + public void reset() { + minimumS2LevelUsed = S2CellId.MAX_LEVEL; + indexComputed = false; + queryCount = 0; + mapping.clear(); + } + + + /** + * Computes the index (if it has not been previously done). 
   */
  public void computeIndex() {
    // No-op if the quad-tree index has already been built.
    if (indexComputed) {
      return;
    }

    // Cover every data edge with S2 cells, and record for each covering cell
    // the index of the edge it covers.
    for (int i = 0; i < getNumEdges(); ++i) {
      S2Point from = edgeFrom(i);
      S2Point to = edgeTo(i);
      ArrayList<S2CellId> cover = Lists.newArrayList();
      // thickenEdge=true: covering also catches near-misses of the edge.
      int level = getCovering(from, to, true, cover);
      // Track the deepest level ever used so queries know how far up the
      // cell hierarchy to search for parent cells.
      minimumS2LevelUsed = Math.min(minimumS2LevelUsed, level);
      for (S2CellId cellId : cover) {
        mapping.put(cellId, i);
      }
    }
    indexComputed = true;
  }

  /** Returns true if the quad-tree index has been built. */
  public boolean isIndexComputed() {
    return indexComputed;
  }

  /**
   * Tell the index that we just received a new request for candidates. Useful
   * to compute when to switch to quad tree.
   */
  protected void incrementQueryCount() {
    ++queryCount;
  }

  /**
   * If the index hasn't been computed yet, looks at how much work has gone into
   * iterating using the brute force method, and how much more work is planned
   * as defined by 'cost'. If it were to have been cheaper to use a quad tree
   * from the beginning, then compute it now. This guarantees that we will never
   * use more than twice the time we would have used had we known in advance
   * exactly how many edges we would have wanted to test. It is the theoretical
   * best.
   *
   * The value 'n' is the number of iterators we expect to request from this
   * edge index.
   *
   * If we have m data edges and n query edges, then the brute force cost is m
   * * n * testCost where testCost is taken to be the cost of
   * EdgeCrosser.robustCrossing, measured to be about 30ns at the time of this
   * writing.
   *
   * If we compute the index, the cost becomes: m * costInsert + n *
   * costFind(m)
   *
   * - costInsert can be expected to be reasonably stable, and was measured at
   * 1200ns with the BM_QuadEdgeInsertionCost benchmark.
   *
   * - costFind depends on the length of the edge. For m=1000 edges, we got
   * timings ranging from 1ms (edge the length of the polygon) to 40ms. The
   * latter is for very long query edges, and needs to be optimized.
We will + * assume for the rest of the discussion that costFind is roughly 3ms. + * + * When doing one additional query, the differential cost is m * testCost - + * costFind(m) With the numbers above, it is better to use the quad tree (if + * we have it) if m >= 100. + * + * If m = 100, 30 queries will give m*n*testCost = m * costInsert = 100ms, + * while the marginal cost to find is 3ms. Thus, this is a reasonable thing to + * do. + */ + public void predictAdditionalCalls(int n) { + if (indexComputed) { + return; + } + if (getNumEdges() > 100 && (queryCount + n) > 30) { + computeIndex(); + } + } + + /** + * Overwrite these functions to give access to the underlying data. The + * function getNumEdges() returns the number of edges in the index, while + * edgeFrom(index) and edgeTo(index) return the "from" and "to" endpoints of + * the edge at the given index. + */ + protected abstract int getNumEdges(); + + protected abstract S2Point edgeFrom(int index); + + protected abstract S2Point edgeTo(int index); + + + /** + * Appends to "candidateCrossings" all edge references which may cross the + * given edge. This is done by covering the edge and then finding all + * references of edges whose coverings overlap this covering. Parent cells are + * checked level by level. Child cells are checked all at once by taking + * advantage of the natural ordering of S2CellIds. + */ + protected void findCandidateCrossings(S2Point a, S2Point b, List<Integer> candidateCrossings) { + Preconditions.checkState(indexComputed); + ArrayList<S2CellId> cover = Lists.newArrayList(); + + getCovering(a, b, false, cover); + getEdgesInParentCells(cover, mapping, minimumS2LevelUsed, candidateCrossings); + + // TODO(user): An important optimization for long query + // edges (Contains queries): keep a bounding cap and clip the query + // edge to the cap before starting the descent. 
+ getEdgesInChildrenCells(a, b, cover, mapping, candidateCrossings); + + // Remove duplicates: This is necessary because edge references are + // inserted into the map once for each covering cell. + Set<Integer> uniqueSet = new HashSet<Integer>(candidateCrossings); + candidateCrossings.clear(); + candidateCrossings.addAll(uniqueSet); + } + + /** + * Sets recursion on for testing a query edge against a cell. We don't recurse + * if there are only a few test edges in it. For testing, it is useful to + * always recurse to the end. + * + * Note: You generally don't want to set this to true anywhere but in tests. + */ + public static void setAlwaysRecurseOnChildren(boolean alwaysRecurseOnChildrenValue) { + alwaysRecurseOnChildren = alwaysRecurseOnChildrenValue; + } + + /** + * Returns the smallest cell containing all four points, or + * {@link S2CellId#sentinel()} if they are not all on the same face. The + * points don't need to be normalized. + */ + private static S2CellId containingCell(S2Point pa, S2Point pb, S2Point pc, S2Point pd) { + S2CellId a = S2CellId.fromPoint(pa); + S2CellId b = S2CellId.fromPoint(pb); + S2CellId c = S2CellId.fromPoint(pc); + S2CellId d = S2CellId.fromPoint(pd); + + if (a.face() != b.face() || a.face() != c.face() || a.face() != d.face()) { + return S2CellId.sentinel(); + } + + while (!a.equals(b) || !a.equals(c) || !a.equals(d)) { + a = a.parent(); + b = b.parent(); + c = c.parent(); + d = d.parent(); + } + return a; + } + + /** + * Returns the smallest cell containing both points, or Sentinel if they are + * not all on the same face. The points don't need to be normalized. + */ + private static S2CellId containingCell(S2Point pa, S2Point pb) { + S2CellId a = S2CellId.fromPoint(pa); + S2CellId b = S2CellId.fromPoint(pb); + + if (a.face() != b.face()) { + return S2CellId.sentinel(); + } + + while (!a.equals(b)) { + a = a.parent(); + b = b.parent(); + } + return a; + } + + /** + * Computes a cell covering of an edge. 
Clears edgeCovering and returns the + * level of the s2 cells used in the covering (only one level is ever used for + * each call). + * + * If thickenEdge is true, the edge is thickened and extended by 1% of its + * length. + * + * It is guaranteed that no child of a covering cell will fully contain the + * covered edge. + */ + private int getCovering( + S2Point a, S2Point b, boolean thickenEdge, ArrayList<S2CellId> edgeCovering) { + edgeCovering.clear(); + + // kMinWidth taken from util/geometry/s2.[h,cc], and assumes that + // (S2_PROJECTION == S2_QUADRATIC_PROJECTION). + // TODO(andriy): this should live in S2.java, but that part of the code + // hasn't been ported from the C++ S2 library yet, so putting our own + // copy here for now. Also, this Java version differs from the C++ version + // in that the C++ version uses a different coordinate system (in the C++ + // version, CL 1904327 changed the definition of the (s,t) coordinate + // system to occupy the square [0,1]x[0,1] rather than [-1,1]x[-1,1]. + // So, kMinWidth here reflects the old [-1,1]x[-1,1] coordinate system. + S2.Metric kMinWidth = new S2.Metric(1, Math.sqrt(2) / 3 /* 0.471 */); + + // Selects the ideal s2 level at which to cover the edge, this will be the + // level whose S2 cells have a width roughly commensurate to the length of + // the edge. We multiply the edge length by 2*THICKENING to guarantee the + // thickening is honored (it's not a big deal if we honor it when we don't + // request it) when doing the covering-by-cap trick. + double edgeLength = a.angle(b); + int idealLevel = kMinWidth.getMaxLevel(edgeLength * (1 + 2 * THICKENING)); + + S2CellId containingCellId; + if (!thickenEdge) { + containingCellId = containingCell(a, b); + } else { + if (idealLevel == S2CellId.MAX_LEVEL) { + // If the edge is tiny, instabilities are more likely, so we + // want to limit the number of operations. 
+ // We pretend we are in a cell much larger so as to trigger the + // 'needs covering' case, so we won't try to thicken the edge. + containingCellId = (new S2CellId(0xFFF0)).parent(3); + } else { + S2Point pq = S2Point.mul(S2Point.minus(b, a), THICKENING); + S2Point ortho = + S2Point.mul(S2Point.normalize(S2Point.crossProd(pq, a)), edgeLength * THICKENING); + S2Point p = S2Point.minus(a, pq); + S2Point q = S2Point.add(b, pq); + // If p and q were antipodal, the edge wouldn't be lengthened, + // and it could even flip! This is not a problem because + // idealLevel != 0 here. The farther p and q can be is roughly + // a quarter Earth away from each other, so we remain + // Theta(THICKENING). + containingCellId = + containingCell(S2Point.minus(p, ortho), S2Point.add(p, ortho), S2Point.minus(q, ortho), + S2Point.add(q, ortho)); + } + } + + // Best case: edge is fully contained in a cell that's not too big. + if (!containingCellId.equals(S2CellId.sentinel()) + && containingCellId.level() >= idealLevel - 2) { + edgeCovering.add(containingCellId); + return containingCellId.level(); + } + + if (idealLevel == 0) { + // Edge is very long, maybe even longer than a face width, so the + // trick below doesn't work. For now, we will add the whole S2 sphere. + // TODO(user): Do something a tad smarter (and beware of the + // antipodal case). + for (S2CellId cellid = S2CellId.begin(0); !cellid.equals(S2CellId.end(0)); + cellid = cellid.next()) { + edgeCovering.add(cellid); + } + return 0; + } + // TODO(user): Check trick below works even when vertex is at + // interface + // between three faces. + + // Use trick as in S2PolygonBuilder.PointIndex.findNearbyPoint: + // Cover the edge by a cap centered at the edge midpoint, then cover + // the cap by four big-enough cells around the cell vertex closest to the + // cap center. 
+ S2Point middle = S2Point.normalize(S2Point.div(S2Point.add(a, b), 2)); + int actualLevel = Math.min(idealLevel, S2CellId.MAX_LEVEL - 1); + S2CellId.fromPoint(middle).getVertexNeighbors(actualLevel, edgeCovering); + return actualLevel; + } + + /** + * Adds to candidateCrossings all the edges present in any ancestor of any + * cell of cover, down to minimumS2LevelUsed. The cell->edge map is in the + * variable mapping. + */ + private static void getEdgesInParentCells(List<S2CellId> cover, + Multimap<S2CellId, Integer> mapping, int minimumS2LevelUsed, + List<Integer> candidateCrossings) { + // Find all parent cells of covering cells. + Set<S2CellId> parentCells = Sets.newHashSet(); + for (S2CellId coverCell : cover) { + for (int parentLevel = coverCell.level() - 1; parentLevel >= minimumS2LevelUsed; + --parentLevel) { + if (!parentCells.add(coverCell.parent(parentLevel))) { + break; // cell is already in => parents are too. + } + } + } + + // Put parent cell edge references into result. + for (S2CellId parentCell : parentCells) { + for (Integer parentCellInt : mapping.get(parentCell)) { + candidateCrossings.add(parentCellInt); + } + } + } + + /** + * Returns true if ab possibly crosses cd, by clipping tiny angles to zero. 
+ */ + private static boolean lenientCrossing(S2Point a, S2Point b, S2Point c, S2Point d) { + Preconditions.checkArgument(S2.isUnitLength(a)); + Preconditions.checkArgument(S2.isUnitLength(b)); + Preconditions.checkArgument(S2.isUnitLength(c)); + + double acb = S2Point.crossProd(a, c).dotProd(b); + double bda = S2Point.crossProd(b, d).dotProd(a); + if (Math.abs(acb) < MAX_DET_ERROR || Math.abs(bda) < MAX_DET_ERROR) { + return true; + } + if (acb * bda < 0) { + return false; + } + double cbd = S2Point.crossProd(c, b).dotProd(d); + double dac = S2Point.crossProd(c, a).dotProd(c); + if (Math.abs(cbd) < MAX_DET_ERROR || Math.abs(dac) < MAX_DET_ERROR) { + return true; + } + return (acb * cbd >= 0) && (acb * dac >= 0); + } + + /** + * Returns true if the edge and the cell (including boundary) intersect. + */ + private static boolean edgeIntersectsCellBoundary(S2Point a, S2Point b, S2Cell cell) { + S2Point[] vertices = new S2Point[4]; + for (int i = 0; i < 4; ++i) { + vertices[i] = cell.getVertex(i); + } + for (int i = 0; i < 4; ++i) { + S2Point fromPoint = vertices[i]; + S2Point toPoint = vertices[(i + 1) % 4]; + if (lenientCrossing(a, b, fromPoint, toPoint)) { + return true; + } + } + return false; + } + + /** + * Generates an S2CellId whose identifier is one greater than the one passed + * to it; this is useful for when we need to pass to an iterator or filter an + * upper non-inclusive bound. + */ + private static S2CellId generateNextSequentialId(S2CellId cellId) { + return new S2CellId(cellId.id() + 1); + } + + /** + * Appends to candidateCrossings the edges that are fully contained in an S2 + * covering of edge. The covering of edge used is initially cover, but is + * refined to eliminate quickly subcells that contain many edges but do not + * intersect with edge. 
+ */ + private static void getEdgesInChildrenCells(S2Point a, S2Point b, List<S2CellId> cover, + TreeMultimap<S2CellId, Integer> mapping, List<Integer> candidateCrossings) { + int numCells = 0; + + // Put all edge references of (covering cells + descendant cells) into + // result. + // This relies on the natural ordering of S2CellIds. + while (!cover.isEmpty()) { + int last = cover.size() - 1; + S2CellId cell = cover.get(last); + cover.remove(last); + ++numCells; + int numEdges = 0; + boolean rewind = alwaysRecurseOnChildren; + + SortedMap<S2CellId, Collection<Integer>> sortedMap = + mapping.asMap().subMap(cell.rangeMin(), generateNextSequentialId(cell.rangeMax())); + + // TODO(user): Maybe distinguish between edges in current cell, + // that + // are going to be added anyhow, and edges in subcells, and rewind only + // those. + if (!rewind) { + for (Collection<Integer> ints : sortedMap.values()) { + for (Integer ivalue : ints) { + candidateCrossings.add(ivalue); + ++numEdges; + if (numEdges == 16 && !cell.isLeaf()) { + rewind = true; + break; + } + } + if (rewind == true) { + break; + } + } + } + // If there are too many to insert, uninsert and recurse. + if (rewind) { + for (int i = 0; i < numEdges; ++i) { + candidateCrossings.remove(candidateCrossings.size() - 1); + } + // Add cells at this level + SortedMap<S2CellId, Collection<Integer>> sortedSmallerMap = + mapping.asMap().subMap(cell, generateNextSequentialId(cell)); + for (Collection<Integer> ints : sortedSmallerMap.values()) { + for (Integer ivalue : ints) { + candidateCrossings.add(ivalue); + } + } + // Recurse on the children -- hopefully some will be empty. 
+ if (sortedSmallerMap.size() < sortedMap.size()) { + S2Cell[] children = new S2Cell[4]; + for (int i = 0; i < 4; ++i) { + children[i] = new S2Cell(); + } + S2Cell c = new S2Cell(cell); + c.subdivide(children); + for (int i = 0; i < 4; ++i) { + // TODO(user): Do the check for the four cells at once, + // as it is enough to check the four edges between the cells. At + // this time, we are checking 16 edges, 4 times too many. + // + // Note that given the guarantee of AppendCovering, it is enough + // to check that the edge intersect with the cell boundary as it + // cannot be fully contained in a cell. + if (edgeIntersectsCellBoundary(a, b, children[i])) { + cover.add(children[i].id()); + } + } + } + } + } + // log.info("Num cells traversed: " + Integer.toString(numCells)); + } + + /* + * An iterator on data edges that may cross a query edge (a,b). Create the + * iterator, call getCandidates(), then hasNext()/next() repeatedly. + * + * The current edge in the iteration has index index(), goes between from() + * and to(). + */ + public static class DataEdgeIterator { + /** + * The structure containing the data edges. + */ + private S2EdgeIndex edgeIndex; + + /** + * Tells whether getCandidates() obtained the candidates through brute force + * iteration or using the quad tree structure. + */ + private boolean isBruteForce; + + /** + * Index of the current edge and of the edge before the last next() call. + */ + private int currentIndex; + + /** + * Cache of edgeIndex.getNumEdges() so that hasNext() doesn't make an extra + * call + */ + private int numEdges; + + /** + * All the candidates obtained by getCandidates() when we are using a + * quad-tree (i.e. isBruteForce = false). + */ + ArrayList<Integer> candidates; + + /** + * Index within array above. We have: currentIndex = + * candidates.get(currentIndexInCandidates). 
+ */ + private int currentIndexInCandidates; + + public DataEdgeIterator(S2EdgeIndex edgeIndex) { + this.edgeIndex = edgeIndex; + candidates = Lists.newArrayList(); + } + + /** + * Initializes the iterator to iterate over a set of candidates that may + * cross the edge (a,b). + */ + public void getCandidates(S2Point a, S2Point b) { + edgeIndex.predictAdditionalCalls(1); + isBruteForce = !edgeIndex.isIndexComputed(); + if (isBruteForce) { + edgeIndex.incrementQueryCount(); + currentIndex = 0; + numEdges = edgeIndex.getNumEdges(); + } else { + candidates.clear(); + edgeIndex.findCandidateCrossings(a, b, candidates); + currentIndexInCandidates = 0; + if (!candidates.isEmpty()) { + currentIndex = candidates.get(0); + } + } + } + + /** + * Index of the current edge in the iteration. + */ + public int index() { + Preconditions.checkState(hasNext()); + return currentIndex; + } + + /** + * False if there are no more candidates; true otherwise. + */ + public boolean hasNext() { + if (isBruteForce) { + return (currentIndex < numEdges); + } else { + return currentIndexInCandidates < candidates.size(); + } + } + + /** + * Iterate to the next available candidate. + */ + public void next() { + Preconditions.checkState(hasNext()); + if (isBruteForce) { + ++currentIndex; + } else { + ++currentIndexInCandidates; + if (currentIndexInCandidates < candidates.size()) { + currentIndex = candidates.get(currentIndexInCandidates); + } + } + } + } +} diff --git a/src/com/google/common/geometry/S2EdgeUtil.java b/src/com/google/common/geometry/S2EdgeUtil.java new file mode 100644 index 0000000..74ec060 --- /dev/null +++ b/src/com/google/common/geometry/S2EdgeUtil.java @@ -0,0 +1,710 @@ +/* + * Copyright 2006 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.common.geometry; + +import com.google.common.base.Preconditions; + +/** + * This class contains various utility functions related to edges. It collects + * together common code that is needed to implement polygonal geometry such as + * polylines, loops, and general polygons. + * + */ +public strictfp class S2EdgeUtil { + /** + * IEEE floating-point operations have a maximum error of 0.5 ULPS (units in + * the last place). For double-precision numbers, this works out to 2**-53 + * (about 1.11e-16) times the magnitude of the result. It is possible to + * analyze the calculation done by getIntersection() and work out the + * worst-case rounding error. I have done a rough version of this, and my + * estimate is that the worst case distance from the intersection point X to + * the great circle through (a0, a1) is about 12 ULPS, or about 1.3e-15. This + * needs to be increased by a factor of (1/0.866) to account for the + * edgeSpliceFraction() in S2PolygonBuilder. Note that the maximum error + * measured by the unittest in 1,000,000 trials is less than 3e-16. + */ + public static final S1Angle DEFAULT_INTERSECTION_TOLERANCE = S1Angle.radians(1.5e-15); + + /** + * This class allows a vertex chain v0, v1, v2, ... to be efficiently tested + * for intersection with a given fixed edge AB. + */ + public static class EdgeCrosser { + // The fields below are all constant. + + private final S2Point a; + private final S2Point b; + private final S2Point aCrossB; + + // The fields below are updated for each vertex in the chain. 
+ + // Previous vertex in the vertex chain. + private S2Point c; + // The orientation of the triangle ACB. + private int acb; + + /** + * AB is the given fixed edge, and C is the first vertex of the vertex + * chain. All parameters must point to fixed storage that persists for the + * lifetime of the EdgeCrosser object. + */ + public EdgeCrosser(S2Point a, S2Point b, S2Point c) { + this.a = a; + this.b = b; + this.aCrossB = S2Point.crossProd(a, b); + restartAt(c); + } + + /** + * Call this function when your chain 'jumps' to a new place. + */ + public void restartAt(S2Point c) { + this.c = c; + acb = -S2.robustCCW(a, b, c, aCrossB); + } + + /** + * This method is equivalent to calling the S2EdgeUtil.robustCrossing() + * function (defined below) on the edges AB and CD. It returns +1 if there + * is a crossing, -1 if there is no crossing, and 0 if two points from + * different edges are the same. Returns 0 or -1 if either edge is + * degenerate. As a side effect, it saves vertex D to be used as the next + * vertex C. + */ + public int robustCrossing(S2Point d) { + // For there to be an edge crossing, the triangles ACB, CBD, BDA, DAC must + // all be oriented the same way (CW or CCW). We keep the orientation + // of ACB as part of our state. When each new point D arrives, we + // compute the orientation of BDA and check whether it matches ACB. + // This checks whether the points C and D are on opposite sides of the + // great circle through AB. + + // Recall that robustCCW is invariant with respect to rotating its + // arguments, i.e. ABC has the same orientation as BDA. + int bda = S2.robustCCW(a, b, d, aCrossB); + int result; + + if (bda == -acb && bda != 0) { + // Most common case -- triangles have opposite orientations. + result = -1; + } else if ((bda & acb) == 0) { + // At least one value is zero -- two vertices are identical. + result = 0; + } else { + // assert (bda == acb && bda != 0); + result = robustCrossingInternal(d); // Slow path. 
+ } + // Now save the current vertex D as the next vertex C, and also save the + // orientation of the new triangle ACB (which is opposite to the current + // triangle BDA). + c = d; + acb = -bda; + return result; + } + + /** + * This method is equivalent to the S2EdgeUtil.edgeOrVertexCrossing() method + * defined below. It is similar to robustCrossing, but handles cases where + * two vertices are identical in a way that makes it easy to implement + * point-in-polygon containment tests. + */ + public boolean edgeOrVertexCrossing(S2Point d) { + // We need to copy c since it is clobbered by robustCrossing(). + S2Point c2 = new S2Point(c.get(0), c.get(1), c.get(2)); + + int crossing = robustCrossing(d); + if (crossing < 0) { + return false; + } + if (crossing > 0) { + return true; + } + + return vertexCrossing(a, b, c2, d); + } + + /** + * This function handles the "slow path" of robustCrossing(). + */ + private int robustCrossingInternal(S2Point d) { + // ACB and BDA have the appropriate orientations, so now we check the + // triangles CBD and DAC. + S2Point cCrossD = S2Point.crossProd(c, d); + int cbd = -S2.robustCCW(c, d, b, cCrossD); + if (cbd != acb) { + return -1; + } + + int dac = S2.robustCCW(c, d, a, cCrossD); + return (dac == acb) ? 1 : -1; + } + } + + /** + * This class computes a bounding rectangle that contains all edges defined by + * a vertex chain v0, v1, v2, ... All vertices must be unit length. Note that + * the bounding rectangle of an edge can be larger than the bounding rectangle + * of its endpoints, e.g. consider an edge that passes through the north pole. + */ + public static class RectBounder { + // The previous vertex in the chain. + private S2Point a; + + // The corresponding latitude-longitude. + private S2LatLng aLatLng; + + // The current bounding rectangle. + private S2LatLngRect bound; + + public RectBounder() { + this.bound = S2LatLngRect.empty(); + } + + /** + * This method is called to add each vertex to the chain. 
'b' must point to + * fixed storage that persists for the lifetime of the RectBounder. + */ + public void addPoint(S2Point b) { + // assert (S2.isUnitLength(b)); + + S2LatLng bLatLng = new S2LatLng(b); + + if (bound.isEmpty()) { + bound = bound.addPoint(bLatLng); + } else { + // We can't just call bound.addPoint(bLatLng) here, since we need to + // ensure that all the longitudes between "a" and "b" are included. + bound = bound.union(S2LatLngRect.fromPointPair(aLatLng, bLatLng)); + + // Check whether the min/max latitude occurs in the edge interior. + // We find the normal to the plane containing AB, and then a vector + // "dir" in this plane that also passes through the equator. We use + // RobustCrossProd to ensure that the edge normal is accurate even + // when the two points are very close together. + S2Point aCrossB = S2.robustCrossProd(a, b); + S2Point dir = S2Point.crossProd(aCrossB, new S2Point(0, 0, 1)); + double da = dir.dotProd(a); + double db = dir.dotProd(b); + + if (da * db < 0) { + // Minimum/maximum latitude occurs in the edge interior. This affects + // the latitude bounds but not the longitude bounds. + double absLat = Math.acos(Math.abs(aCrossB.get(2) / aCrossB.norm())); + if (da < 0) { + // It's possible that absLat < lat.lo() due to numerical errors. + bound.lat().setHi((Math.max(absLat, bound.lat().hi()))); + } else { + bound.lat().setLo(Math.min(-absLat, bound.lat().lo())); + } + } + } + a = b; + aLatLng = bLatLng; + } + + /** + * Return the bounding rectangle of the edge chain that connects the + * vertices defined so far. + */ + public S2LatLngRect getBound() { + return bound; + } + + } + + /** + * The purpose of this class is to find edges that intersect a given longitude + * interval. It can be used as an efficient rejection test when attempting to + * find edges that intersect a given region. It accepts a vertex chain v0, v1, + * v2, ... 
and returns a boolean value indicating whether each edge intersects + * the specified longitude interval. + */ + public static class LongitudePruner { + // The interval to be tested against. + private S1Interval interval; + + // The longitude of the next v0. + private double lng0; + + /** + *'interval' is the longitude interval to be tested against, and 'v0' is + * the first vertex of edge chain. + */ + public LongitudePruner(S1Interval interval, S2Point v0) { + this.interval = interval; + this.lng0 = S2LatLng.longitude(v0).radians(); + } + + /** + * Returns true if the edge (v0, v1) intersects the given longitude + * interval, and then saves 'v1' to be used as the next 'v0'. + */ + public boolean intersects(S2Point v1) { + double lng1 = S2LatLng.longitude(v1).radians(); + boolean result = interval.intersects(S1Interval.fromPointPair(lng0, lng1)); + lng0 = lng1; + return result; + } + } + + /** + * A wedge relation's test method accepts two edge chains A=(a0,a1,a2) and + * B=(b0,b1,b2) where a1==b1, and returns either -1, 0, or 1 to indicate the + * relationship between the region to the left of A and the region to the left + * of B. Wedge relations are used to determine the local relationship between + * two polygons that share a common vertex. + * + * All wedge relations require that a0 != a2 and b0 != b2. Other degenerate + * cases (such as a0 == b2) are handled as expected. The parameter "ab1" + * denotes the common vertex a1 == b1. + */ + public interface WedgeRelation { + int test(S2Point a0, S2Point ab1, S2Point a2, S2Point b0, S2Point b2); + } + + public static class WedgeContains implements WedgeRelation { + /** + * Given two edge chains (see WedgeRelation above), this function returns +1 + * if the region to the left of A contains the region to the left of B, and + * 0 otherwise. 
+ */ + @Override + public int test(S2Point a0, S2Point ab1, S2Point a2, S2Point b0, S2Point b2) { + // For A to contain B (where each loop interior is defined to be its left + // side), the CCW edge order around ab1 must be a2 b2 b0 a0. We split + // this test into two parts that test three vertices each. + return S2.orderedCCW(a2, b2, b0, ab1) && S2.orderedCCW(b0, a0, a2, ab1) ? 1 : 0; + } + } + + public static class WedgeIntersects implements WedgeRelation { + /** + * Given two edge chains (see WedgeRelation above), this function returns -1 + * if the region to the left of A intersects the region to the left of B, + * and 0 otherwise. Note that regions are defined such that points along a + * boundary are contained by one side or the other, not both. So for + * example, if A,B,C are distinct points ordered CCW around a vertex O, then + * the wedges BOA, AOC, and COB do not intersect. + */ + @Override + public int test(S2Point a0, S2Point ab1, S2Point a2, S2Point b0, S2Point b2) { + // For A not to intersect B (where each loop interior is defined to be + // its left side), the CCW edge order around ab1 must be a0 b2 b0 a2. + // Note that it's important to write these conditions as negatives + // (!OrderedCCW(a,b,c,o) rather than Ordered(c,b,a,o)) to get correct + // results when two vertices are the same. + return (S2.orderedCCW(a0, b2, b0, ab1) && S2.orderedCCW(b0, a2, a0, ab1) ? 0 : -1); + } + } + + public static class WedgeContainsOrIntersects implements WedgeRelation { + /** + * Given two edge chains (see WedgeRelation above), this function returns +1 + * if A contains B, 0 if A and B are disjoint, and -1 if A intersects but + * does not contain B. + */ + @Override + public int test(S2Point a0, S2Point ab1, S2Point a2, S2Point b0, S2Point b2) { + // This is similar to WedgeContainsOrCrosses, except that we want to + // distinguish cases (1) [A contains B], (3) [A and B are disjoint], + // and (2,4,5,6) [A intersects but does not contain B]. 
+ + if (S2.orderedCCW(a0, a2, b2, ab1)) { + // We are in case 1, 5, or 6, or case 2 if a2 == b2. + return S2.orderedCCW(b2, b0, a0, ab1) ? 1 : -1; // Case 1 vs. 2,5,6. + } + // We are in cases 2, 3, or 4. + if (!S2.orderedCCW(a2, b0, b2, ab1)) { + return 0; // Case 3. + } + + // We are in case 2 or 4, or case 3 if a2 == b0. + return (a2.equals(b0)) ? 0 : -1; // Case 3 vs. 2,4. + } + } + + public static class WedgeContainsOrCrosses implements WedgeRelation { + /** + * Given two edge chains (see WedgeRelation above), this function returns +1 + * if A contains B, 0 if B contains A or the two wedges do not intersect, + * and -1 if the edge chains A and B cross each other (i.e. if A intersects + * both the interior and exterior of the region to the left of B). In + * degenerate cases where more than one of these conditions is satisfied, + * the maximum possible result is returned. For example, if A == B then the + * result is +1. + */ + @Override + public int test(S2Point a0, S2Point ab1, S2Point a2, S2Point b0, S2Point b2) { + // There are 6 possible edge orderings at a shared vertex (all + // of these orderings are circular, i.e. abcd == bcda): + // + // (1) a2 b2 b0 a0: A contains B + // (2) a2 a0 b0 b2: B contains A + // (3) a2 a0 b2 b0: A and B are disjoint + // (4) a2 b0 a0 b2: A and B intersect in one wedge + // (5) a2 b2 a0 b0: A and B intersect in one wedge + // (6) a2 b0 b2 a0: A and B intersect in two wedges + // + // In cases (4-6), the boundaries of A and B cross (i.e. the boundary + // of A intersects the interior and exterior of B and vice versa). + // Thus we want to distinguish cases (1), (2-3), and (4-6). + // + // Note that the vertices may satisfy more than one of the edge + // orderings above if two or more vertices are the same. The tests + // below are written so that we take the most favorable + // interpretation, i.e. preferring (1) over (2-3) over (4-6). 
In + // particular note that if orderedCCW(a,b,c,o) returns true, it may be + // possible that orderedCCW(c,b,a,o) is also true (if a == b or b == c). + + if (S2.orderedCCW(a0, a2, b2, ab1)) { + // The cases with this vertex ordering are 1, 5, and 6, + // although case 2 is also possible if a2 == b2. + if (S2.orderedCCW(b2, b0, a0, ab1)) { + return 1; // Case 1 (A contains B) + } + + // We are in case 5 or 6, or case 2 if a2 == b2. + return (a2.equals(b2)) ? 0 : -1; // Case 2 vs. 5,6. + } + // We are in case 2, 3, or 4. + return S2.orderedCCW(a0, b0, a2, ab1) ? 0 : -1; // Case 2,3 vs. 4. + } + } + + /** + * Return true if edge AB crosses CD at a point that is interior to both + * edges. Properties: + * + * (1) simpleCrossing(b,a,c,d) == simpleCrossing(a,b,c,d) (2) + * simpleCrossing(c,d,a,b) == simpleCrossing(a,b,c,d) + */ + public static boolean simpleCrossing(S2Point a, S2Point b, S2Point c, S2Point d) { + // We compute simpleCCW() for triangles ACB, CBD, BDA, and DAC. All + // of these triangles need to have the same orientation (CW or CCW) + // for an intersection to exist. Note that this is slightly more + // restrictive than the corresponding definition for planar edges, + // since we need to exclude pairs of line segments that would + // otherwise "intersect" by crossing two antipodal points. + + S2Point ab = S2Point.crossProd(a, b); + double acb = -(ab.dotProd(c)); + double bda = ab.dotProd(d); + if (acb * bda <= 0) { + return false; + } + + S2Point cd = S2Point.crossProd(c, d); + double cbd = -(cd.dotProd(b)); + double dac = cd.dotProd(a); + return (acb * cbd > 0) && (acb * dac > 0); + } + + /** + * Like SimpleCrossing, except that points that lie exactly on a line are + * arbitrarily classified as being on one side or the other (according to the + * rules of S2.robustCCW). It returns +1 if there is a crossing, -1 if there + * is no crossing, and 0 if any two vertices from different edges are the + * same. Returns 0 or -1 if either edge is degenerate. 
/**
 * Like simpleCrossing(), except that points that lie exactly on a line are
 * arbitrarily classified as being on one side or the other (according to the
 * rules of S2.robustCCW). It returns +1 if there is a crossing, -1 if there
 * is no crossing, and 0 if any two vertices from different edges are the
 * same. Returns 0 or -1 if either edge is degenerate. Properties of
 * robustCrossing:
 *
 * (1) robustCrossing(b,a,c,d) == robustCrossing(a,b,c,d)
 * (2) robustCrossing(c,d,a,b) == robustCrossing(a,b,c,d)
 * (3) robustCrossing(a,b,c,d) == 0 if a==c, a==d, b==c, b==d
 * (4) robustCrossing(a,b,c,d) <= 0 if a==b or c==d
 *
 * Note that if you want to check an edge against a *chain* of other edges,
 * it is much more efficient to use an EdgeCrosser (above).
 */
public static int robustCrossing(S2Point a, S2Point b, S2Point c, S2Point d) {
  // For there to be a crossing, the triangles ACB, CBD, BDA, DAC must
  // all have the same orientation (clockwise or counterclockwise).
  //
  // First we compute the orientation of ACB and BDA. We permute the
  // arguments to robustCCW so that we can reuse the cross-product of A and B.
  // Recall that when the arguments to robustCCW are permuted, the sign of the
  // result changes according to the sign of the permutation. Thus ACB and
  // ABC are oppositely oriented, while BDA and ABD are the same.
  S2Point aCrossB = S2Point.crossProd(a, b);
  int acb = -S2.robustCCW(a, b, c, aCrossB);
  int bda = S2.robustCCW(a, b, d, aCrossB);

  // If any two vertices are the same, the result is degenerate. robustCCW
  // returns -1, 0, or +1; the bitwise AND of any two values drawn from
  // {-1, +1} is nonzero, so (bda & acb) == 0 exactly when at least one of
  // the two orientations is zero.
  if ((bda & acb) == 0) {
    return 0;
  }

  // If ABC and BDA have opposite orientations (the most common case),
  // there is no crossing.
  if (bda != acb) {
    return -1;
  }

  // Otherwise we compute the orientations of CBD and DAC, and check whether
  // their orientations are compatible with the other two triangles.
  S2Point cCrossD = S2Point.crossProd(c, d);
  int cbd = -S2.robustCCW(c, d, b, cCrossD);
  if (cbd != acb) {
    return -1;
  }

  int dac = S2.robustCCW(c, d, a, cCrossD);
  return (dac == acb) ? 1 : -1;
}
/**
 * Given two edges AB and CD where at least two vertices are identical (i.e.
 * robustCrossing(a,b,c,d) == 0), this function defines whether the two edges
 * "cross" in a such a way that point-in-polygon containment tests can be
 * implemented by counting the number of edge crossings. The basic rule is
 * that a "crossing" occurs if AB is encountered after CD during a CCW sweep
 * around the shared vertex starting from a fixed reference point.
 *
 * Note that according to this rule, if AB crosses CD then in general CD does
 * not cross AB. However, this leads to the correct result when counting
 * polygon edge crossings. For example, suppose that A,B,C are three
 * consecutive vertices of a CCW polygon. If we now consider the edge
 * crossings of a segment BP as P sweeps around B, the crossing number changes
 * parity exactly when BP crosses BA or BC.
 *
 * Useful properties of VertexCrossing (VC):
 *
 * (1) VC(a,a,c,d) == VC(a,b,c,c) == false
 * (2) VC(a,b,a,b) == VC(a,b,b,a) == true
 * (3) VC(a,b,c,d) == VC(a,b,d,c) == VC(b,a,c,d) == VC(b,a,d,c)
 * (4) If exactly one of a,b equals one of c,d, then exactly one of
 *     VC(a,b,c,d) and VC(c,d,a,b) is true
 *
 * It is an error to call this method with 4 distinct vertices.
 */
public static boolean vertexCrossing(S2Point a, S2Point b, S2Point c, S2Point d) {
  // If A == B or C == D there is no intersection. We need to check this
  // case first in case 3 or more input points are identical.
  if (a.equals(b) || c.equals(d)) {
    return false;
  }

  // If any other pair of vertices is equal, there is a crossing if and only
  // if orderedCCW() indicates that the edge AB is further CCW around the
  // shared vertex than the edge CD. S2.ortho() supplies the fixed reference
  // point for the CCW sweep around the shared vertex.
  if (a.equals(d)) {
    return S2.orderedCCW(S2.ortho(a), c, b, a);
  }
  if (b.equals(c)) {
    return S2.orderedCCW(S2.ortho(b), d, a, b);
  }
  if (a.equals(c)) {
    return S2.orderedCCW(S2.ortho(a), d, b, a);
  }
  if (b.equals(d)) {
    return S2.orderedCCW(S2.ortho(b), c, a, b);
  }

  // Reaching here means all four vertices were distinct, which violates the
  // precondition (callers must only invoke this when robustCrossing == 0).
  // assert (false);
  return false;
}

/**
 * A convenience function that calls robustCrossing() to handle cases where
 * all four vertices are distinct, and VertexCrossing() to handle cases where
 * two or more vertices are the same. This defines a crossing function such
 * that point-in-polygon containment tests can be implemented by simply
 * counting edge crossings.
 */
public static boolean edgeOrVertexCrossing(S2Point a, S2Point b, S2Point c, S2Point d) {
  int crossing = robustCrossing(a, b, c, d);
  if (crossing < 0) {
    return false;
  }
  if (crossing > 0) {
    return true;
  }
  return vertexCrossing(a, b, c, d);
}

/**
 * Mutable helper that tracks, among a set of candidate points, the one
 * closest (by squared chord distance) to a fixed query point. Used by
 * getIntersection() to clip the computed intersection to the edge endpoints.
 */
static class CloserResult {
  // Smallest squared chord distance seen so far.
  private double dmin2;
  // The candidate point that achieved dmin2.
  private S2Point vmin;

  public double getDmin2() {
    return dmin2;
  }

  public S2Point getVmin() {
    return vmin;
  }

  public CloserResult(double dmin2, S2Point vmin) {
    this.dmin2 = dmin2;
    this.vmin = vmin;
  }

  public void replaceIfCloser(S2Point x, S2Point y) {
    // If the squared distance from x to y is less than dmin2, then replace
    // vmin by y and update dmin2 accordingly. Ties are broken by lessThan()
    // so the result is deterministic regardless of candidate order.
    double d2 = S2Point.minus(x, y).norm2();
    if (d2 < dmin2 || (d2 == dmin2 && y.lessThan(vmin))) {
      dmin2 = d2;
      vmin = y;
    }
  }
}
See the description of + * "DEFAULT_INTERSECTION_TOLERANCE" below for details. + */ + public static S2Point getIntersection(S2Point a0, S2Point a1, S2Point b0, S2Point b1) { + Preconditions.checkArgument(robustCrossing(a0, a1, b0, b1) > 0, + "Input edges a0a1 and b0b1 muct have a true robustCrossing."); + + // We use robustCrossProd() to get accurate results even when two endpoints + // are close together, or when the two line segments are nearly parallel. + S2Point aNorm = S2Point.normalize(S2.robustCrossProd(a0, a1)); + S2Point bNorm = S2Point.normalize(S2.robustCrossProd(b0, b1)); + S2Point x = S2Point.normalize(S2.robustCrossProd(aNorm, bNorm)); + + // Make sure the intersection point is on the correct side of the sphere. + // Since all vertices are unit length, and edges are less than 180 degrees, + // (a0 + a1) and (b0 + b1) both have positive dot product with the + // intersection point. We use the sum of all vertices to make sure that the + // result is unchanged when the edges are reversed or exchanged. + if (x.dotProd(S2Point.add(S2Point.add(a0, a1), S2Point.add(b0, b1))) < 0) { + x = S2Point.neg(x); + } + + // The calculation above is sufficient to ensure that "x" is within + // DEFAULT_INTERSECTION_TOLERANCE of the great circles through (a0,a1) and + // (b0,b1). + // However, if these two great circles are very close to parallel, it is + // possible that "x" does not lie between the endpoints of the given line + // segments. In other words, "x" might be on the great circle through + // (a0,a1) but outside the range covered by (a0,a1). In this case we do + // additional clipping to ensure that it does. + + if (S2.orderedCCW(a0, x, a1, aNorm) && S2.orderedCCW(b0, x, b1, bNorm)) { + return x; + } + + // Find the acceptable endpoint closest to x and return it. An endpoint is + // acceptable if it lies between the endpoints of the other line segment. 
+ CloserResult r = new CloserResult(10, x); + if (S2.orderedCCW(b0, a0, b1, bNorm)) { + r.replaceIfCloser(x, a0); + } + if (S2.orderedCCW(b0, a1, b1, bNorm)) { + r.replaceIfCloser(x, a1); + } + if (S2.orderedCCW(a0, b0, a1, aNorm)) { + r.replaceIfCloser(x, b0); + } + if (S2.orderedCCW(a0, b1, a1, aNorm)) { + r.replaceIfCloser(x, b1); + } + return r.getVmin(); + } + + /** + * Given a point X and an edge AB, return the distance ratio AX / (AX + BX). + * If X happens to be on the line segment AB, this is the fraction "t" such + * that X == Interpolate(A, B, t). Requires that A and B are distinct. + */ + public static double getDistanceFraction(S2Point x, S2Point a0, S2Point a1) { + Preconditions.checkArgument(!a0.equals(a1)); + double d0 = x.angle(a0); + double d1 = x.angle(a1); + return d0 / (d0 + d1); + } + + /** + * Return the minimum distance from X to any point on the edge AB. The result + * is very accurate for small distances but may have some numerical error if + * the distance is large (approximately Pi/2 or greater). The case A == B is + * handled correctly. Note: x, a and b must be of unit length. Throws + * IllegalArgumentException if this is not the case. + */ + public static S1Angle getDistance(S2Point x, S2Point a, S2Point b) { + return getDistance(x, a, b, S2.robustCrossProd(a, b)); + } + + /** + * A slightly more efficient version of getDistance() where the cross product + * of the two endpoints has been precomputed. The cross product does not need + * to be normalized, but should be computed using S2.robustCrossProd() for the + * most accurate results. + */ + public static S1Angle getDistance(S2Point x, S2Point a, S2Point b, S2Point aCrossB) { + Preconditions.checkArgument(S2.isUnitLength(x)); + Preconditions.checkArgument(S2.isUnitLength(a)); + Preconditions.checkArgument(S2.isUnitLength(b)); + + // There are three cases. If X is located in the spherical wedge defined by + // A, B, and the axis A x B, then the closest point is on the segment AB. 
+ // Otherwise the closest point is either A or B; the dividing line between + // these two cases is the great circle passing through (A x B) and the + // midpoint of AB. + + if (S2.simpleCCW(aCrossB, a, x) && S2.simpleCCW(x, b, aCrossB)) { + // The closest point to X lies on the segment AB. We compute the distance + // to the corresponding great circle. The result is accurate for small + // distances but not necessarily for large distances (approaching Pi/2). + + double sinDist = Math.abs(x.dotProd(aCrossB)) / aCrossB.norm(); + return S1Angle.radians(Math.asin(Math.min(1.0, sinDist))); + } + + // Otherwise, the closest point is either A or B. The cheapest method is + // just to compute the minimum of the two linear (as opposed to spherical) + // distances and convert the result to an angle. Again, this method is + // accurate for small but not large distances (approaching Pi). + + double linearDist2 = Math.min(S2Point.minus(x, a).norm2(), S2Point.minus(x, b).norm2()); + return S1Angle.radians(2 * Math.asin(Math.min(1.0, 0.5 * Math.sqrt(linearDist2)))); + } + + /** + * Returns the point on edge AB closest to X. x, a and b must be of unit + * length. Throws IllegalArgumentException if this is not the case. + * + */ + public static S2Point getClosestPoint(S2Point x, S2Point a, S2Point b) { + Preconditions.checkArgument(S2.isUnitLength(x)); + Preconditions.checkArgument(S2.isUnitLength(a)); + Preconditions.checkArgument(S2.isUnitLength(b)); + + S2Point crossProd = S2.robustCrossProd(a, b); + // Find the closest point to X along the great circle through AB. + S2Point p = S2Point.minus(x, S2Point.mul(crossProd, x.dotProd(crossProd) / crossProd.norm2())); + + // If p is on the edge AB, then it's the closest point. + if (S2.simpleCCW(crossProd, a, p) && S2.simpleCCW(p, b, crossProd)) { + return S2Point.normalize(p); + } + // Otherwise, the closest point is either A or B. + return S2Point.minus(x, a).norm2() <= S2Point.minus(x, b).norm2() ? 
a : b; + } + + /** Constructor is private so that this class is never instantiated. */ + private S2EdgeUtil() { + } +} diff --git a/src/com/google/common/geometry/S2LatLng.java b/src/com/google/common/geometry/S2LatLng.java new file mode 100644 index 0000000..f7bb10b --- /dev/null +++ b/src/com/google/common/geometry/S2LatLng.java @@ -0,0 +1,291 @@ +/* + * Copyright 2005 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.common.geometry; + +/** + * This class represents a point on the unit sphere as a pair of + * latitude-longitude coordinates. Like the rest of the "geometry" package, the + * intent is to represent spherical geometry as a mathematical abstraction, so + * functions that are specifically related to the Earth's geometry (e.g. + * easting/northing conversions) should be put elsewhere. + * + */ +public strictfp class S2LatLng { + + /** + * Approximate "effective" radius of the Earth in meters. + */ + public static final double EARTH_RADIUS_METERS = 6367000.0; + + /** The center point the lat/lng coordinate system. 
*/ + public static final S2LatLng CENTER = new S2LatLng(0.0, 0.0); + + private final double latRadians; + private final double lngRadians; + + public static S2LatLng fromRadians(double latRadians, double lngRadians) { + return new S2LatLng(latRadians, lngRadians); + } + + public static S2LatLng fromDegrees(double latDegrees, double lngDegrees) { + return new S2LatLng(S1Angle.degrees(latDegrees), S1Angle.degrees(lngDegrees)); + } + + public static S2LatLng fromE5(long latE5, long lngE5) { + return new S2LatLng(S1Angle.e5(latE5), S1Angle.e5(lngE5)); + } + + public static S2LatLng fromE6(long latE6, long lngE6) { + return new S2LatLng(S1Angle.e6(latE6), S1Angle.e6(lngE6)); + } + + public static S2LatLng fromE7(long latE7, long lngE7) { + return new S2LatLng(S1Angle.e7(latE7), S1Angle.e7(lngE7)); + } + + public static S1Angle latitude(S2Point p) { + // We use atan2 rather than asin because the input vector is not necessarily + // unit length, and atan2 is much more accurate than asin near the poles. + return S1Angle.radians( + Math.atan2(p.get(2), Math.sqrt(p.get(0) * p.get(0) + p.get(1) * p.get(1)))); + } + + public static S1Angle longitude(S2Point p) { + // Note that atan2(0, 0) is defined to be zero. + return S1Angle.radians(Math.atan2(p.get(1), p.get(0))); + } + + /** This is internal to avoid ambiguity about which units are expected. */ + private S2LatLng(double latRadians, double lngRadians) { + this.latRadians = latRadians; + this.lngRadians = lngRadians; + } + + /** + * Basic constructor. The latitude and longitude must be within the ranges + * allowed by is_valid() below. + * + * TODO(dbeaumont): Make this a static factory method (fromLatLng() ?). + */ + public S2LatLng(S1Angle lat, S1Angle lng) { + this(lat.radians(), lng.radians()); + } + + /** + * Default constructor for convenience when declaring arrays, etc. + * + * TODO(dbeaumont): Remove the default constructor (just use CENTER). 
+ */ + public S2LatLng() { + this(0, 0); + } + + /** + * Convert a point (not necessarily normalized) to an S2LatLng. + * + * TODO(dbeaumont): Make this a static factory method (fromPoint() ?). + */ + public S2LatLng(S2Point p) { + this(Math.atan2(p.z, Math.sqrt(p.x * p.x + p.y * p.y)), Math.atan2(p.y, p.x)); + // The latitude and longitude are already normalized. We use atan2 to + // compute the latitude because the input vector is not necessarily unit + // length, and atan2 is much more accurate than asin near the poles. + // Note that atan2(0, 0) is defined to be zero. + } + + /** Returns the latitude of this point as a new S1Angle. */ + public S1Angle lat() { + return S1Angle.radians(latRadians); + } + + /** Returns the latitude of this point as radians. */ + public double latRadians() { + return latRadians; + } + + /** Returns the latitude of this point as degrees. */ + public double latDegrees() { + return 180.0 / Math.PI * latRadians; + } + + /** Returns the longitude of this point as a new S1Angle. */ + public S1Angle lng() { + return S1Angle.radians(lngRadians); + } + + /** Returns the longitude of this point as radians. */ + public double lngRadians() { + return lngRadians; + } + + /** Returns the longitude of this point as degrees. */ + public double lngDegrees() { + return 180.0 / Math.PI * lngRadians; + } + + /** + * Return true if the latitude is between -90 and 90 degrees inclusive and the + * longitude is between -180 and 180 degrees inclusive. + */ + public boolean isValid() { + return Math.abs(lat().radians()) <= S2.M_PI_2 && Math.abs(lng().radians()) <= S2.M_PI; + } + + /** + * Returns a new S2LatLng based on this instance for which {@link #isValid()} + * will be {@code true}. + * <ul> + * <li>Latitude is clipped to the range {@code [-90, 90]} + * <li>Longitude is normalized to be in the range {@code [-180, 180]} + * </ul> + * <p>If the current point is valid then the returned point will have the same + * coordinates. 
public S2LatLng normalized() {
  // Math.IEEEremainder(x, 2 * S2.M_PI) reduces its argument to the range
  // [-S2.M_PI, S2.M_PI] inclusive, which is what we want here. Latitude is
  // simply clamped to [-Pi/2, Pi/2].
  return new S2LatLng(Math.max(-S2.M_PI_2, Math.min(S2.M_PI_2, lat().radians())),
      Math.IEEEremainder(lng().radians(), 2 * S2.M_PI));
}

// Clamps the latitude to the range [-90, 90] degrees, and adds or subtracts
// a multiple of 360 degrees to the longitude if necessary to reduce it to
// the range [-180, 180].

/** Convert an S2LatLng to the equivalent unit-length vector (S2Point). */
public S2Point toPoint() {
  double phi = lat().radians();
  double theta = lng().radians();
  double cosphi = Math.cos(phi);
  return new S2Point(Math.cos(theta) * cosphi, Math.sin(theta) * cosphi, Math.sin(phi));
}

/**
 * Return the distance (measured along the surface of the sphere) to the given
 * point.
 */
public S1Angle getDistance(final S2LatLng o) {
  // This implements the Haversine formula, which is numerically stable for
  // small distances but only gets about 8 digits of precision for very large
  // distances (e.g. antipodal points). Note that 8 digits is still accurate
  // to within about 10cm for a sphere the size of the Earth.
  //
  // This could be fixed with another sin() and cos() below, but at that point
  // you might as well just convert both arguments to S2Points and compute the
  // distance that way (which gives about 15 digits of accuracy for all
  // distances).

  double lat1 = lat().radians();
  double lat2 = o.lat().radians();
  double lng1 = lng().radians();
  double lng2 = o.lng().radians();
  double dlat = Math.sin(0.5 * (lat2 - lat1));
  double dlng = Math.sin(0.5 * (lng2 - lng1));
  double x = dlat * dlat + dlng * dlng * Math.cos(lat1) * Math.cos(lat2);
  // The max(0, ...) guards against a tiny negative argument to sqrt caused
  // by rounding when x is very close to 1 (near-antipodal points).
  return S1Angle.radians(2 * Math.atan2(Math.sqrt(x), Math.sqrt(Math.max(0.0, 1.0 - x))));
  // Return the distance (measured along the surface of the sphere) to the
  // given S2LatLng. This is mathematically equivalent to:
  //
  // S1Angle::FromRadians(ToPoint().Angle(o.ToPoint())
  //
  // but this implementation is slightly more efficient.
}

/**
 * Returns the surface distance to the given point assuming a constant radius.
 */
public double getDistance(final S2LatLng o, double radius) {
  // TODO(dbeaumont): Maybe check that radius >= 0 ?
  return getDistance(o).radians() * radius;
}

/**
 * Returns the surface distance to the given point assuming the default Earth
 * radius of {@link #EARTH_RADIUS_METERS}.
 */
public double getEarthDistance(final S2LatLng o) {
  return getDistance(o, EARTH_RADIUS_METERS);
}

/**
 * Adds the given point to this point.
 * Note that there is no guarantee that the new point will be <em>valid</em>.
 */
public S2LatLng add(final S2LatLng o) {
  return new S2LatLng(latRadians + o.latRadians, lngRadians + o.lngRadians);
}

/**
 * Subtracts the given point from this point.
 * Note that there is no guarantee that the new point will be <em>valid</em>.
 */
public S2LatLng sub(final S2LatLng o) {
  return new S2LatLng(latRadians - o.latRadians, lngRadians - o.lngRadians);
}
/**
 * Two S2LatLng instances are equal iff their latitude and longitude radians
 * are bitwise-equal doubles (no tolerance; see approxEquals for that).
 */
@Override
public boolean equals(Object that) {
  if (that instanceof S2LatLng) {
    S2LatLng o = (S2LatLng) that;
    return (latRadians == o.latRadians) && (lngRadians == o.lngRadians);
  }
  return false;
}

@Override
public int hashCode() {
  // NOTE(review): this uses "value +=" (effectively value = 38 * value +
  // bits) rather than the conventional "value = 37 * value + bits". The
  // result is still deterministic and consistent with equals(), so it is
  // deliberately left unchanged to preserve existing hash values.
  long value = 17;
  value += 37 * value + Double.doubleToLongBits(latRadians);
  value += 37 * value + Double.doubleToLongBits(lngRadians);
  return (int) (value ^ (value >>> 32));
}

/**
 * Returns true if both the latitude and longitude of the given point are
 * within {@code maxError} radians of this point. Note this compares each
 * coordinate independently rather than the spherical distance.
 */
public boolean approxEquals(S2LatLng o, double maxError) {
  return (Math.abs(latRadians - o.latRadians) < maxError)
      && (Math.abs(lngRadians - o.lngRadians) < maxError);
}

/**
 * Returns true if the given point is within {@code 1e-9} radians of this
 * point. This corresponds to a distance of less than {@code 1cm} at the
 * surface of the Earth.
 */
public boolean approxEquals(S2LatLng o) {
  return approxEquals(o, 1e-9);
}

/** Returns "(latRadians, lngRadians)" for debugging. */
@Override
public String toString() {
  return "(" + latRadians + ", " + lngRadians + ")";
}

/** Returns "(latDegrees, lngDegrees)" for debugging. */
public String toStringDegrees() {
  return "(" + latDegrees() + ", " + lngDegrees() + ")";
}
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.common.geometry;
+
+import com.google.common.base.Preconditions;
+
+/**
+ * An S2LatLngRect represents a latitude-longitude rectangle. It is capable of
+ * representing the empty and full rectangles as well as single points.
+ *
+ */
+
+public strictfp class S2LatLngRect implements S2Region {
+
+  private final R1Interval lat;
+  private final S1Interval lng;
+
+  /**
+   * Construct a rectangle from minimum and maximum latitudes and longitudes. If
+   * lo.lng() > hi.lng(), the rectangle spans the 180 degree longitude line.
+   */
+  public S2LatLngRect(final S2LatLng lo, final S2LatLng hi) {
+    lat = new R1Interval(lo.lat().radians(), hi.lat().radians());
+    lng = new S1Interval(lo.lng().radians(), hi.lng().radians());
+    // assert (isValid());
+  }
+
+  /** Construct a rectangle from latitude and longitude intervals. */
+  public S2LatLngRect(R1Interval lat, S1Interval lng) {
+    this.lat = lat;
+    this.lng = lng;
+    // assert (isValid());
+  }
+
+  /** The canonical empty rectangle */
+  public static S2LatLngRect empty() {
+    return new S2LatLngRect(R1Interval.empty(), S1Interval.empty());
+  }
+
+  /** The canonical full rectangle. */
+  public static S2LatLngRect full() {
+    return new S2LatLngRect(fullLat(), fullLng());
+  }
+
+  /** The full allowable range of latitudes. */
+  public static R1Interval fullLat() {
+    return new R1Interval(-S2.M_PI_2, S2.M_PI_2);
+  }
+
+  /**
+   * The full allowable range of longitudes.
+   */
+  public static S1Interval fullLng() {
+    return S1Interval.full();
+  }
+
+  /**
+   * Construct a rectangle from a center point (in lat-lng space) and size in
+   * each dimension. If size.lng() is greater than 360 degrees it is clamped,
+   * and latitudes greater than +/- 90 degrees are also clamped. So for example,
+   * FromCenterSize((80,170),(20,20)) -> (lo=(60,150),hi=(90,-170)).
+   */
+  public static S2LatLngRect fromCenterSize(S2LatLng center, S2LatLng size) {
+    return fromPoint(center).expanded(size.mul(0.5));
+  }
+
+  /** Convenience method to construct a rectangle containing a single point. */
+  public static S2LatLngRect fromPoint(S2LatLng p) {
+    // assert (p.isValid());
+    return new S2LatLngRect(p, p);
+  }
+
+  /**
+   * Convenience method to construct the minimal bounding rectangle containing
+   * the two given points. This is equivalent to starting with an empty
+   * rectangle and calling AddPoint() twice. Note that it is different than the
+   * S2LatLngRect(lo, hi) constructor, where the first point is always used as
+   * the lower-left corner of the resulting rectangle.
+   */
+  public static S2LatLngRect fromPointPair(S2LatLng p1, S2LatLng p2) {
+    // assert (p1.isValid() && p2.isValid());
+    return new S2LatLngRect(R1Interval.fromPointPair(p1.lat().radians(), p2
+        .lat().radians()), S1Interval.fromPointPair(p1.lng().radians(), p2.lng()
+        .radians()));
+  }
+
+  /**
+   * Return a latitude-longitude rectangle that contains the edge from "a" to
+   * "b". Both points must be unit-length. Note that the bounding rectangle of
+   * an edge can be larger than the bounding rectangle of its endpoints.
+   */
+  public static S2LatLngRect fromEdge(S2Point a, S2Point b) {
+    // assert (S2.isUnitLength(a) && S2.isUnitLength(b));
+    S2LatLngRect r = fromPointPair(new S2LatLng(a), new S2LatLng(b));
+
+    // Check whether the min/max latitude occurs in the edge interior.
+    // We find the normal to the plane containing AB, and then a vector "dir" in
+    // this plane that also passes through the equator. We use RobustCrossProd
+    // to ensure that the edge normal is accurate even when the two points are
+    // very close together.
+    S2Point ab = S2.robustCrossProd(a, b);
+    S2Point dir = S2Point.crossProd(ab, new S2Point(0, 0, 1));
+    double da = dir.dotProd(a);
+    double db = dir.dotProd(b);
+    if (da * db >= 0) {
+      // Minimum and maximum latitude are attained at the vertices.
+      return r;
+    }
+    // Minimum/maximum latitude occurs in the edge interior. This affects the
+    // latitude bounds but not the longitude bounds.
+    double absLat = Math.acos(Math.abs(ab.z / ab.norm()));
+    if (da < 0) {
+      return new S2LatLngRect(new R1Interval(r.lat().lo(), absLat), r.lng());
+    } else {
+      return new S2LatLngRect(new R1Interval(-absLat, r.lat().hi()), r.lng());
+    }
+  }
+
+  /**
+   * Return true if the rectangle is valid, which essentially just means that
+   * the latitude bounds do not exceed Pi/2 in absolute value and the longitude
+   * bounds do not exceed Pi in absolute value.
+   *
+   */
+  public boolean isValid() {
+    // The lat/lng ranges must either be both empty or both non-empty.
+    return (Math.abs(lat.lo()) <= S2.M_PI_2 && Math.abs(lat.hi()) <= S2.M_PI_2
+        && lng.isValid() && lat.isEmpty() == lng.isEmpty());
+  }
+
+  // Accessor methods.
+  public S1Angle latLo() {
+    return S1Angle.radians(lat.lo());
+  }
+
+  public S1Angle latHi() {
+    return S1Angle.radians(lat.hi());
+  }
+
+  public S1Angle lngLo() {
+    return S1Angle.radians(lng.lo());
+  }
+
+  public S1Angle lngHi() {
+    return S1Angle.radians(lng.hi());
+  }
+
+  public R1Interval lat() {
+    return lat;
+  }
+
+  public S1Interval lng() {
+    return lng;
+  }
+
+  public S2LatLng lo() {
+    return new S2LatLng(latLo(), lngLo());
+  }
+
+  public S2LatLng hi() {
+    return new S2LatLng(latHi(), lngHi());
+  }
+
+  /**
+   * Return true if the rectangle is empty, i.e. it contains no points at all.
+   */
+  public boolean isEmpty() {
+    return lat.isEmpty();
+  }
+
+  // Return true if the rectangle is full, i.e. it contains all points.
+  public boolean isFull() {
+    return lat.equals(fullLat()) && lng.isFull();
+  }
+
+  /**
+   * Return true if lng_.lo() > lng_.hi(), i.e. the rectangle crosses the 180
+   * degree longitude line.
+   */
+  public boolean isInverted() {
+    return lng.isInverted();
+  }
+
+  /** Return the k-th vertex of the rectangle (k = 0,1,2,3) in CCW order. */
+  public S2LatLng getVertex(int k) {
+    // Twiddle bits to return the points in CCW order (SW, SE, NE, NW).
+    return S2LatLng.fromRadians(lat.bound(k >> 1), lng.bound((k >> 1)
+        ^ (k & 1)));
+  }
+
+  /**
+   * Return the center of the rectangle in latitude-longitude space (in general
+   * this is not the center of the region on the sphere).
+   */
+  public S2LatLng getCenter() {
+    return S2LatLng.fromRadians(lat.getCenter(), lng.getCenter());
+  }
+
+  /**
+   * Return the minimum distance (measured along the surface of the sphere)
+   * from a given point to the rectangle (both its boundary and its interior).
+   * The latLng must be valid.
+   */
+  public S1Angle getDistance(S2LatLng p) {
+    // The algorithm here is the same as in getDistance(S2LatLngRect), only
+    // with simplified calculations.
+    S2LatLngRect a = this;
+
+    Preconditions.checkState(!a.isEmpty());
+    Preconditions.checkArgument(p.isValid());
+
+    if (a.lng().contains(p.lng().radians())) {
+      return S1Angle.radians(Math.max(0.0, Math.max(p.lat().radians() - a.lat().hi(),
+          a.lat().lo() - p.lat().radians())));
+    }
+
+    S1Interval interval = new S1Interval(a.lng().hi(), a.lng().complement().getCenter());
+    double aLng = a.lng().lo();
+    if (interval.contains(p.lng().radians())) {
+      aLng = a.lng().hi();
+    }
+
+    S2Point lo = S2LatLng.fromRadians(a.lat().lo(), aLng).toPoint();
+    S2Point hi = S2LatLng.fromRadians(a.lat().hi(), aLng).toPoint();
+    S2Point loCrossHi =
+        S2LatLng.fromRadians(0, aLng - S2.M_PI_2).normalized().toPoint();
+    return S2EdgeUtil.getDistance(p.toPoint(), lo, hi, loCrossHi);
+  }
+
+  /**
+   * Return the minimum distance (measured along the surface of the sphere) to
+   * the given S2LatLngRect. Both S2LatLngRects must be non-empty.
+   */
+  public S1Angle getDistance(S2LatLngRect other) {
+    S2LatLngRect a = this;
+    S2LatLngRect b = other;
+
+    Preconditions.checkState(!a.isEmpty());
+    Preconditions.checkArgument(!b.isEmpty());
+
+    // First, handle the trivial cases where the longitude intervals overlap.
+    if (a.lng().intersects(b.lng())) {
+      if (a.lat().intersects(b.lat())) {
+        return S1Angle.radians(0);  // Intersection between a and b.
+      }
+
+      // We found an overlap in the longitude interval, but not in the latitude
+      // interval. This means the shortest path travels along some line of
+      // longitude connecting the high-latitude of the lower rect with the
+      // low-latitude of the higher rect.
+      S1Angle lo, hi;
+      if (a.lat().lo() > b.lat().hi()) {
+        lo = b.latHi();
+        hi = a.latLo();
+      } else {
+        lo = a.latHi();
+        hi = b.latLo();
+      }
+      return S1Angle.radians(hi.radians() - lo.radians());
+    }
+
+    // The longitude intervals don't overlap. In this case, the closest points
+    // occur somewhere on the pair of longitudinal edges which are nearest in
+    // longitude-space.
+    S1Angle aLng, bLng;
+    S1Interval loHi = S1Interval.fromPointPair(a.lng().lo(), b.lng().hi());
+    S1Interval hiLo = S1Interval.fromPointPair(a.lng().hi(), b.lng().lo());
+    if (loHi.getLength() < hiLo.getLength()) {
+      aLng = a.lngLo();
+      bLng = b.lngHi();
+    } else {
+      aLng = a.lngHi();
+      bLng = b.lngLo();
+    }
+
+    // The shortest distance between the two longitudinal segments will include
+    // at least one segment endpoint. We could probably narrow this down further
+    // to a single point-edge distance by comparing the relative latitudes of the
+    // endpoints, but for the sake of clarity, we'll do all four point-edge
+    // distance tests.
+    S2Point aLo = new S2LatLng(a.latLo(), aLng).toPoint();
+    S2Point aHi = new S2LatLng(a.latHi(), aLng).toPoint();
+    S2Point aLoCrossHi =
+        S2LatLng.fromRadians(0, aLng.radians() - S2.M_PI_2).normalized().toPoint();
+    S2Point bLo = new S2LatLng(b.latLo(), bLng).toPoint();
+    S2Point bHi = new S2LatLng(b.latHi(), bLng).toPoint();
+    S2Point bLoCrossHi =
+        S2LatLng.fromRadians(0, bLng.radians() - S2.M_PI_2).normalized().toPoint();
+
+    return S1Angle.min(S2EdgeUtil.getDistance(aLo, bLo, bHi, bLoCrossHi),
+        S1Angle.min(S2EdgeUtil.getDistance(aHi, bLo, bHi, bLoCrossHi),
+            S1Angle.min(S2EdgeUtil.getDistance(bLo, aLo, aHi, aLoCrossHi),
+                S2EdgeUtil.getDistance(bHi, aLo, aHi, aLoCrossHi))));
+  }
+
+  /**
+   * Return the width and height of this rectangle in latitude-longitude space.
+   * Empty rectangles have a negative width and height.
+   */
+  public S2LatLng getSize() {
+    return S2LatLng.fromRadians(lat.getLength(), lng.getLength());
+  }
+
+  /**
+   * More efficient version of Contains() that accepts a S2LatLng rather than an
+   * S2Point.
+   */
+  public boolean contains(S2LatLng ll) {
+    // assert (ll.isValid());
+    return (lat.contains(ll.lat().radians()) && lng.contains(ll.lng()
+        .radians()));
+
+  }
+
+  /**
+   * Return true if and only if the given point is contained in the interior of
+   * the region (i.e. the region excluding its boundary). The point 'p' does not
+   * need to be normalized.
+   */
+  public boolean interiorContains(S2Point p) {
+    return interiorContains(new S2LatLng(p));
+  }
+
+  /**
+   * More efficient version of InteriorContains() that accepts a S2LatLng rather
+   * than an S2Point.
+   */
+  public boolean interiorContains(S2LatLng ll) {
+    // assert (ll.isValid());
+    return (lat.interiorContains(ll.lat().radians()) && lng
+        .interiorContains(ll.lng().radians()));
+  }
+
+  /**
+   * Return true if and only if the rectangle contains the given other
+   * rectangle.
+   */
+  public boolean contains(S2LatLngRect other) {
+    return lat.contains(other.lat) && lng.contains(other.lng);
+  }
+
+  /**
+   * Return true if and only if the interior of this rectangle contains all
+   * points of the given other rectangle (including its boundary).
+   */
+  public boolean interiorContains(S2LatLngRect other) {
+    return (lat.interiorContains(other.lat) && lng
+        .interiorContains(other.lng));
+  }
+
+  /** Return true if this rectangle and the given other rectangle have any
+      points in common. */
+  public boolean intersects(S2LatLngRect other) {
+    return lat.intersects(other.lat) && lng.intersects(other.lng);
+  }
+
+  /**
+   * Returns true if this rectangle intersects the given cell. (This is an exact
+   * test and may be fairly expensive, see also MayIntersect below.)
+   */
+  public boolean intersects(S2Cell cell) {
+    // First we eliminate the cases where one region completely contains the
+    // other. Once these are disposed of, then the regions will intersect
+    // if and only if their boundaries intersect.
+
+    if (isEmpty()) {
+      return false;
+    }
+    if (contains(cell.getCenter())) {
+      return true;
+    }
+    if (cell.contains(getCenter().toPoint())) {
+      return true;
+    }
+
+    // Quick rejection test (not required for correctness).
+    if (!intersects(cell.getRectBound())) {
+      return false;
+    }
+
+    // Now check whether the boundaries intersect. Unfortunately, a
+    // latitude-longitude rectangle does not have straight edges -- two edges
+    // are curved, and at least one of them is concave.
+
+    // Precompute the cell vertices as points and latitude-longitudes.
+    S2Point[] cellV = new S2Point[4];
+    S2LatLng[] cellLl = new S2LatLng[4];
+    for (int i = 0; i < 4; ++i) {
+      cellV[i] = cell.getVertex(i); // Must be normalized.
+      cellLl[i] = new S2LatLng(cellV[i]);
+      if (contains(cellLl[i])) {
+        return true; // Quick acceptance test.
+      }
+    }
+
+    for (int i = 0; i < 4; ++i) {
+      S1Interval edgeLng = S1Interval.fromPointPair(
+          cellLl[i].lng().radians(), cellLl[(i + 1) & 3].lng().radians());
+      if (!lng.intersects(edgeLng)) {
+        continue;
+      }
+
+      final S2Point a = cellV[i];
+      final S2Point b = cellV[(i + 1) & 3];
+      if (edgeLng.contains(lng.lo())) {
+        if (intersectsLngEdge(a, b, lat, lng.lo())) {
+          return true;
+        }
+      }
+      if (edgeLng.contains(lng.hi())) {
+        if (intersectsLngEdge(a, b, lat, lng.hi())) {
+          return true;
+        }
+      }
+      if (intersectsLatEdge(a, b, lat.lo(), lng)) {
+        return true;
+      }
+      if (intersectsLatEdge(a, b, lat.hi(), lng)) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  /**
+   * Return true if and only if the interior of this rectangle intersects any
+   * point (including the boundary) of the given other rectangle.
+   */
+  public boolean interiorIntersects(S2LatLngRect other) {
+    return (lat.interiorIntersects(other.lat) && lng
+        .interiorIntersects(other.lng));
+  }
+
+  public S2LatLngRect addPoint(S2Point p) {
+    return addPoint(new S2LatLng(p));
+  }
+
+  // Increase the size of the bounding rectangle to include the given point.
+  // The rectangle is expanded by the minimum amount possible.
+  public S2LatLngRect addPoint(S2LatLng ll) {
+    // assert (ll.isValid());
+    R1Interval newLat = lat.addPoint(ll.lat().radians());
+    S1Interval newLng = lng.addPoint(ll.lng().radians());
+    return new S2LatLngRect(newLat, newLng);
+  }
+
+  /**
+   * Return a rectangle that contains all points whose latitude distance from
+   * this rectangle is at most margin.lat(), and whose longitude distance from
+   * this rectangle is at most margin.lng(). In particular, latitudes are
+   * clamped while longitudes are wrapped. Note that any expansion of an empty
+   * interval remains empty, and both components of the given margin must be
+   * non-negative.
+   *
+   * NOTE: If you are trying to grow a rectangle by a certain *distance* on the
+   * sphere (e.g. 5km), use the ConvolveWithCap() method instead.
+   */
+  public S2LatLngRect expanded(S2LatLng margin) {
+    // assert (margin.lat().radians() >= 0 && margin.lng().radians() >= 0);
+    if (isEmpty()) {
+      return this;
+    }
+    return new S2LatLngRect(lat.expanded(margin.lat().radians()).intersection(
+        fullLat()), lng.expanded(margin.lng().radians()));
+  }
+
+  /**
+   * Return the smallest rectangle containing the union of this rectangle and
+   * the given rectangle.
+   */
+  public S2LatLngRect union(S2LatLngRect other) {
+    return new S2LatLngRect(lat.union(other.lat), lng.union(other.lng));
+  }
+
+  /**
+   * Return the smallest rectangle containing the intersection of this rectangle
+   * and the given rectangle. Note that the region of intersection may consist
+   * of two disjoint rectangles, in which case a single rectangle spanning both
+   * of them is returned.
+   */
+  public S2LatLngRect intersection(S2LatLngRect other) {
+    R1Interval intersectLat = lat.intersection(other.lat);
+    S1Interval intersectLng = lng.intersection(other.lng);
+    if (intersectLat.isEmpty() || intersectLng.isEmpty()) {
+      // The lat/lng ranges must either be both empty or both non-empty.
+      return empty();
+    }
+    return new S2LatLngRect(intersectLat, intersectLng);
+  }
+
+  /**
+   * Return a rectangle that contains the convolution of this rectangle with a
+   * cap of the given angle. This expands the rectangle by a fixed distance (as
+   * opposed to growing the rectangle in latitude-longitude space). The returned
+   * rectangle includes all points whose minimum distance to the original
+   * rectangle is at most the given angle.
+   */
+  public S2LatLngRect convolveWithCap(S1Angle angle) {
+    // The most straightforward approach is to build a cap centered on each
+    // vertex and take the union of all the bounding rectangles (including the
+    // original rectangle; this is necessary for very large rectangles).
+
+    // Optimization: convert the angle to a height exactly once.
+    S2Cap cap = S2Cap.fromAxisAngle(new S2Point(1, 0, 0), angle);
+
+    S2LatLngRect r = this;
+    for (int k = 0; k < 4; ++k) {
+      S2Cap vertexCap = S2Cap.fromAxisHeight(getVertex(k).toPoint(), cap
+          .height());
+      r = r.union(vertexCap.getRectBound());
+    }
+    return r;
+  }
+
+  /** Return the surface area of this rectangle on the unit sphere. */
+  public double area() {
+    if (isEmpty()) {
+      return 0;
+    }
+
+    // This is the size difference of the two spherical caps, multiplied by
+    // the longitude ratio.
+    return lng().getLength() * Math.abs(Math.sin(latHi().radians()) - Math.sin(latLo().radians()));
+  }
+
+  /** Return true if two rectangles contain the same set of points. */
+  @Override
+  public boolean equals(Object that) {
+    if (!(that instanceof S2LatLngRect)) {
+      return false;
+    }
+    S2LatLngRect otherRect = (S2LatLngRect) that;
+    return lat().equals(otherRect.lat()) && lng().equals(otherRect.lng());
+  }
+
+  /**
+   * Return true if the latitude and longitude intervals of the two rectangles
+   * are the same up to the given tolerance (see R1Interval and S1Interval
+   * for details).
+   */
+  public boolean approxEquals(S2LatLngRect other, double maxError) {
+    return (lat.approxEquals(other.lat, maxError) && lng.approxEquals(
+        other.lng, maxError));
+  }
+
+  public boolean approxEquals(S2LatLngRect other) {
+    return approxEquals(other, 1e-15);
+  }
+
+  @Override
+  public int hashCode() {
+    int value = 17;
+    value = 37 * value + lat.hashCode();
+    return (37 * value + lng.hashCode());
+  }
+
+  // //////////////////////////////////////////////////////////////////////
+  // S2Region interface (see {@code S2Region} for details):
+
+  @Override
+  public S2Region clone() {
+    return new S2LatLngRect(this.lo(), this.hi());
+  }
+
+  @Override
+  public S2Cap getCapBound() {
+    // We consider two possible bounding caps, one whose axis passes
+    // through the center of the lat-long rectangle and one whose axis
+    // is the north or south pole. We return the smaller of the two caps.
+
+    if (isEmpty()) {
+      return S2Cap.empty();
+    }
+
+    double poleZ, poleAngle;
+    if (lat.lo() + lat.hi() < 0) {
+      // South pole axis yields smaller cap.
+      poleZ = -1;
+      poleAngle = S2.M_PI_2 + lat.hi();
+    } else {
+      poleZ = 1;
+      poleAngle = S2.M_PI_2 - lat.lo();
+    }
+    S2Cap poleCap = S2Cap.fromAxisAngle(new S2Point(0, 0, poleZ), S1Angle
+        .radians(poleAngle));
+
+    // For bounding rectangles that span 180 degrees or less in longitude, the
+    // maximum cap size is achieved at one of the rectangle vertices. For
+    // rectangles that are larger than 180 degrees, we punt and always return a
+    // bounding cap centered at one of the two poles.
+    double lngSpan = lng.hi() - lng.lo();
+    if (Math.IEEEremainder(lngSpan, 2 * S2.M_PI) >= 0) {
+      if (lngSpan < 2 * S2.M_PI) {
+        S2Cap midCap = S2Cap.fromAxisAngle(getCenter().toPoint(), S1Angle
+            .radians(0));
+        for (int k = 0; k < 4; ++k) {
+          midCap = midCap.addPoint(getVertex(k).toPoint());
+        }
+        if (midCap.height() < poleCap.height()) {
+          return midCap;
+        }
+      }
+    }
+    return poleCap;
+  }
+
+  @Override
+  public S2LatLngRect getRectBound() {
+    return this;
+  }
+
+  @Override
+  public boolean contains(S2Cell cell) {
+    // A latitude-longitude rectangle contains a cell if and only if it contains
+    // the cell's bounding rectangle. (This is an exact test.)
+    return contains(cell.getRectBound());
+  }
+
+  /**
+   * This test is cheap but is NOT exact. Use Intersects() if you want a more
+   * accurate and more expensive test. Note that when this method is used by an
+   * S2RegionCoverer, the accuracy isn't all that important since if a cell may
+   * intersect the region then it is subdivided, and the accuracy of this method
+   * goes up as the cells get smaller.
+   */
+  @Override
+  public boolean mayIntersect(S2Cell cell) {
+    // This test is cheap but is NOT exact (see s2latlngrect.h).
+    return intersects(cell.getRectBound());
+  }
+
+  /** The point 'p' does not need to be normalized. */
+  public boolean contains(S2Point p) {
+    return contains(new S2LatLng(p));
+  }
+
+  /**
+   * Return true if the edge AB intersects the given edge of constant longitude.
+   */
+  private static boolean intersectsLngEdge(S2Point a, S2Point b,
+      R1Interval lat, double lng) {
+    // Return true if the segment AB intersects the given edge of constant
+    // longitude. The nice thing about edges of constant longitude is that
+    // they are straight lines on the sphere (geodesics).
+
+    return S2.simpleCrossing(a, b, S2LatLng.fromRadians(lat.lo(), lng)
+        .toPoint(), S2LatLng.fromRadians(lat.hi(), lng).toPoint());
+  }
+
+  /**
+   * Return true if the edge AB intersects the given edge of constant latitude.
+   */
+  private static boolean intersectsLatEdge(S2Point a, S2Point b, double lat,
+      S1Interval lng) {
+    // Return true if the segment AB intersects the given edge of constant
+    // latitude. Unfortunately, lines of constant latitude are curves on
+    // the sphere. They can intersect a straight edge in 0, 1, or 2 points.
+    // assert (S2.isUnitLength(a) && S2.isUnitLength(b));
+
+    // First, compute the normal to the plane AB that points vaguely north.
+    S2Point z = S2Point.normalize(S2.robustCrossProd(a, b));
+    if (z.z < 0) {
+      z = S2Point.neg(z);
+    }
+
+    // Extend this to an orthonormal frame (x,y,z) where x is the direction
+    // where the great circle through AB achieves its maximum latitude.
+    S2Point y = S2Point.normalize(S2.robustCrossProd(z, new S2Point(0, 0, 1)));
+    S2Point x = S2Point.crossProd(y, z);
+    // assert (S2.isUnitLength(x) && x.z >= 0);
+
+    // Compute the angle "theta" from the x-axis (in the x-y plane defined
+    // above) where the great circle intersects the given line of latitude.
+    double sinLat = Math.sin(lat);
+    if (Math.abs(sinLat) >= x.z) {
+      return false; // The great circle does not reach the given latitude.
+    }
+    // assert (x.z > 0);
+    double cosTheta = sinLat / x.z;
+    double sinTheta = Math.sqrt(1 - cosTheta * cosTheta);
+    double theta = Math.atan2(sinTheta, cosTheta);
+
+    // The candidate intersection points are located +/- theta in the x-y
+    // plane. For an intersection to be valid, we need to check that the
+    // intersection point is contained in the interior of the edge AB and
+    // also that it is contained within the given longitude interval "lng".
+
+    // Compute the range of theta values spanned by the edge AB.
+    S1Interval abTheta = S1Interval.fromPointPair(Math.atan2(
+        a.dotProd(y), a.dotProd(x)), Math.atan2(b.dotProd(y), b.dotProd(x)));
+
+    if (abTheta.contains(theta)) {
+      // Check if the intersection point is also in the given "lng" interval.
+      S2Point isect = S2Point.add(S2Point.mul(x, cosTheta), S2Point.mul(y,
+          sinTheta));
+      if (lng.contains(Math.atan2(isect.y, isect.x))) {
+        return true;
+      }
+    }
+    if (abTheta.contains(-theta)) {
+      // Check if the intersection point is also in the given "lng" interval.
+      S2Point intersection = S2Point.sub(S2Point.mul(x, cosTheta), S2Point.mul(y, sinTheta));
+      if (lng.contains(Math.atan2(intersection.y, intersection.x))) {
+        return true;
+      }
+    }
+    return false;
+
+  }
+
+  @Override
+  public String toString() {
+    return "[Lo=" + lo() + ", Hi=" + hi() + "]";
+  }
+}
diff --git a/src/com/google/common/geometry/S2Loop.java b/src/com/google/common/geometry/S2Loop.java
new file mode 100644
index 0000000..3e449d4
--- /dev/null
+++ b/src/com/google/common/geometry/S2Loop.java
@@ -0,0 +1,916 @@
+/*
+ * Copyright 2006 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.common.geometry;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Maps;
+import com.google.common.geometry.S2EdgeUtil.EdgeCrosser;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.logging.Logger;
+
+/**
+ *
+ * An S2Loop represents a simple spherical polygon. It consists of a single
+ * chain of vertices where the first vertex is implicitly connected to the last.
+ * All loops are defined to have a CCW orientation, i.e. the interior of the
+ * polygon is on the left side of the edges.
This implies that a clockwise loop + * enclosing a small area is interpreted to be a CCW loop enclosing a very large + * area. + * + * Loops are not allowed to have any duplicate vertices (whether adjacent or + * not), and non-adjacent edges are not allowed to intersect. Loops must have at + * least 3 vertices. Although these restrictions are not enforced in optimized + * code, you may get unexpected results if they are violated. + * + * Point containment is defined such that if the sphere is subdivided into + * faces (loops), every point is contained by exactly one face. This implies + * that loops do not necessarily contain all (or any) of their vertices An + * S2LatLngRect represents a latitude-longitude rectangle. It is capable of + * representing the empty and full rectangles as well as single points. + * + */ + +public strictfp class S2Loop implements S2Region, Comparable<S2Loop> { + private static final Logger log = Logger.getLogger(S2Loop.class.getCanonicalName()); + + /** + * Max angle that intersections can be off by and yet still be considered + * colinear. + */ + public static final double MAX_INTERSECTION_ERROR = 1e-15; + + private S2Point[] vertices; + private int numVertices; + + /* + * The index (into "vertices") of the vertex that comes first in the total + * ordering of all vertices in this loop. + */ + private int firstLogicalVertex; + + private S2LatLngRect bound; + private boolean originInside; + private int depth; + + // TODO(kirilll): Get rid of debug mode. Turn it into tests. + public static boolean debugMode = false; + + /** + * Initialize a loop connecting the given vertices. The last vertex is + * implicitly connected to the first. All points should be unit length. Loops + * must have at least 3 vertices. 
+ * + * @param vertices + */ + public S2Loop(final List<S2Point> vertices) { + this.numVertices = vertices.size(); + this.vertices = new S2Point[numVertices]; + this.bound = S2LatLngRect.full(); + this.depth = 0; + + // if (debugMode) { + // assert (isValid(vertices, DEFAULT_MAX_ADJACENT)); + // } + + vertices.toArray(this.vertices); + + // initOrigin() must be called before InitBound() because the latter + // function expects Contains() to work properly. + initOrigin(); + initBound(); + initFirstLogicalVertex(); + } + + /** + * Initialize a loop corresponding to the given cell. + */ + public S2Loop(S2Cell cell) { + this.bound = cell.getRectBound(); + initFromCell(cell); + } + + /** + * Like the constructor above, but assumes that the cell's bounding rectangle + * has been precomputed. + * + * @param cell + * @param bound + */ + public S2Loop(S2Cell cell, S2LatLngRect bound) { + this.bound = bound; + initFromCell(cell); + } + + /** + * Copy constructor. + */ + public S2Loop(S2Loop src) { + this.numVertices = src.numVertices(); + this.vertices = src.vertices.clone(); + this.firstLogicalVertex = src.firstLogicalVertex; + this.bound = src.getRectBound(); + this.originInside = src.originInside; + this.depth = src.depth(); + } + + public int depth() { + return depth; + } + + /** + * The depth of a loop is defined as its nesting level within its containing + * polygon. "Outer shell" loops have depth 0, holes within those loops have + * depth 1, shells within those holes have depth 2, etc. This field is only + * used by the S2Polygon implementation. + * + * @param depth + */ + public void setDepth(int depth) { + this.depth = depth; + } + + /** + * Return true if this loop represents a hole in its containing polygon. + */ + public boolean isHole() { + return (depth & 1) != 0; + } + + /** + * The sign of a loop is -1 if the loop represents a hole in its containing + * polygon, and +1 otherwise. + */ + public int sign() { + return isHole() ? 
-1 : 1; + } + + public int numVertices() { + return numVertices; + } + + /** + * For convenience, we make two entire copies of the vertex list available: + * vertex(n..2*n-1) is mapped to vertex(0..n-1), where n == numVertices(). + */ + public S2Point vertex(int i) { + Preconditions.checkState(i >= 0 && i < 2 * numVertices, "Invalid vertex index"); + int j = i - numVertices(); + return vertices[(j >= 0) ? j : i]; + } + + /** + * Comparator (needed by Comparable interface) + */ + @Override + public int compareTo(S2Loop other) { + if (numVertices() != other.numVertices()) { + return this.numVertices() - other.numVertices(); + } + // Compare the two loops' vertices, starting with each loop's + // firstLogicalVertex. This allows us to always catch cases where logically + // identical loops have different vertex orderings (e.g. ABCD and BCDA). + int maxVertices = numVertices(); + int iThis = firstLogicalVertex; + int iOther = other.firstLogicalVertex; + for (int i = 0; i < maxVertices; ++i, ++iThis, ++iOther) { + int compare = vertex(iThis).compareTo(other.vertex(iOther)); + if (compare != 0) { + return compare; + } + } + return 0; + } + + /** + * Calculates firstLogicalVertex, the vertex in this loop that comes first in + * a total ordering of all vertices (by way of S2Point's compareTo function). + */ + private void initFirstLogicalVertex() { + int first = 0; + for (int i = 1; i < numVertices; ++i) { + if (vertex(i).compareTo(vertex(first)) < 0) { + first = i; + } + } + firstLogicalVertex = first; + } + + /** + * Return true if the loop area is at most 2*Pi. + */ + public boolean isNormalized() { + // We allow a bit of error so that exact hemispheres are + // considered normalized. + return getArea() <= 2 * S2.M_PI + 1e-14; + } + + /** + * Invert the loop if necessary so that the area enclosed by the loop is at + * most 2*Pi. 
+ */ + public void normalize() { + if (!isNormalized()) { + invert(); + } + } + + /** + * Reverse the order of the loop vertices, effectively complementing the + * region represented by the loop. + */ + public void invert() { + int last = numVertices() - 1; + for (int i = (last - 1) / 2; i >= 0; --i) { + S2Point t = vertices[i]; + vertices[i] = vertices[last - i]; + vertices[last - i] = t; + } + originInside ^= true; + if (bound.lat().lo() > -S2.M_PI_2 && bound.lat().hi() < S2.M_PI_2) { + // The complement of this loop contains both poles. + bound = S2LatLngRect.full(); + } else { + initBound(); + } + initFirstLogicalVertex(); + } + + /** + * Helper method to get area and optionally centroid. + */ + private S2AreaCentroid getAreaCentroid(boolean doCentroid) { + S2Point centroid = null; + // Don't crash even if loop is not well-defined. + if (numVertices() < 3) { + return new S2AreaCentroid(0D, centroid); + } + + // The triangle area calculation becomes numerically unstable as the length + // of any edge approaches 180 degrees. However, a loop may contain vertices + // that are 180 degrees apart and still be valid, e.g. a loop that defines + // the northern hemisphere using four points. We handle this case by using + // triangles centered around an origin that is slightly displaced from the + // first vertex. The amount of displacement is enough to get plenty of + // accuracy for antipodal points, but small enough so that we still get + // accurate areas for very tiny triangles. + // + // Of course, if the loop contains a point that is exactly antipodal from + // our slightly displaced vertex, the area will still be unstable, but we + // expect this case to be very unlikely (i.e. a polygon with two vertices on + // opposite sides of the Earth with one of them displaced by about 2mm in + // exactly the right direction). Note that the approximate point resolution + // using the E7 or S2CellId representation is only about 1cm. 
+ + S2Point origin = vertex(0); + int axis = (origin.largestAbsComponent() + 1) % 3; + double slightlyDisplaced = origin.get(axis) + S2.M_E * 1e-10; + origin = + new S2Point((axis == 0) ? slightlyDisplaced : origin.x, + (axis == 1) ? slightlyDisplaced : origin.y, (axis == 2) ? slightlyDisplaced : origin.z); + origin = S2Point.normalize(origin); + + double areaSum = 0; + S2Point centroidSum = new S2Point(0, 0, 0); + for (int i = 1; i <= numVertices(); ++i) { + areaSum += S2.signedArea(origin, vertex(i - 1), vertex(i)); + if (doCentroid) { + // The true centroid is already premultiplied by the triangle area. + S2Point trueCentroid = S2.trueCentroid(origin, vertex(i - 1), vertex(i)); + centroidSum = S2Point.add(centroidSum, trueCentroid); + } + } + // The calculated area at this point should be between -4*Pi and 4*Pi, + // although it may be slightly larger or smaller than this due to + // numerical errors. + // assert (Math.abs(areaSum) <= 4 * S2.M_PI + 1e-12); + + if (areaSum < 0) { + // If the area is negative, we have computed the area to the right of the + // loop. The area to the left is 4*Pi - (-area). Amazingly, the centroid + // does not need to be changed, since it is the negative of the integral + // of position over the region to the right of the loop. This is the same + // as the integral of position over the region to the left of the loop, + // since the integral of position over the entire sphere is (0, 0, 0). + areaSum += 4 * S2.M_PI; + } + // The loop's sign() does not affect the return result and should be taken + // into account by the caller. + if (doCentroid) { + centroid = centroidSum; + } + return new S2AreaCentroid(areaSum, centroid); + } + + /** + * Return the area of the loop interior, i.e. the region on the left side of + * the loop. The return value is between 0 and 4*Pi and the true centroid of + * the loop multiplied by the area of the loop (see S2.java for details on + * centroids). 
Note that the centroid may not be contained by the loop. + */ + public S2AreaCentroid getAreaAndCentroid() { + return getAreaCentroid(true); + } + + /** + * Return the area of the polygon interior, i.e. the region on the left side + * of an odd number of loops. The return value is between 0 and 4*Pi. + */ + public double getArea() { + return getAreaCentroid(false).getArea(); + } + + /** + * Return the true centroid of the polygon multiplied by the area of the + * polygon (see {@link S2} for details on centroids). Note that the centroid + * may not be contained by the polygon. + */ + public S2Point getCentroid() { + return getAreaCentroid(true).getCentroid(); + } + + // The following are the possible relationships between two loops A and B: + // + // (1) A and B do not intersect. + // (2) A contains B. + // (3) B contains A. + // (4) The boundaries of A and B cross (i.e. the boundary of A + // intersects the interior and exterior of B and vice versa). + // (5) (A union B) is the entire sphere (i.e. A contains the + // complement of B and vice versa). + // + // More than one of these may be true at the same time, for example if + // A == B or A == Complement(B). + + /** + * Return true if the region contained by this loop is a superset of the + * region contained by the given other loop. + */ + public boolean contains(S2Loop b) { + // For this loop A to contains the given loop B, all of the following must + // be true: + // + // (1) There are no edge crossings between A and B except at vertices. + // + // (2) At every vertex that is shared between A and B, the local edge + // ordering implies that A contains B. + // + // (3) If there are no shared vertices, then A must contain a vertex of B + // and B must not contain a vertex of A. (An arbitrary vertex may be + // chosen in each case.) + // + // The second part of (3) is necessary to detect the case of two loops whose + // union is the entire sphere, i.e. 
two loops that contain each other's
+ // boundaries but not each other's interiors.
+
+ if (!bound.contains(b.getRectBound())) {
+ return false;
+ }
+
+ // Unless there are shared vertices, we need to check whether A contains a
+ // vertex of B. Since shared vertices are rare, it is more efficient to do
+ // this test up front as a quick rejection test.
+ if (!contains(b.vertex(0)) && findVertex(b.vertex(0)) < 0) {
+ return false;
+ }
+
+ // Now check whether there are any edge crossings, and also check the loop
+ // relationship at any shared vertices.
+ if (checkEdgeCrossings(b, new S2EdgeUtil.WedgeContains()) <= 0) {
+ return false;
+ }
+
+ // At this point we know that the boundaries of A and B do not intersect,
+ // and that A contains a vertex of B. However we still need to check for
+ // the case mentioned above, where (A union B) is the entire sphere.
+ // Normally this check is very cheap due to the bounding box precondition.
+ if (bound.union(b.getRectBound()).isFull()) {
+ if (b.contains(vertex(0)) && b.findVertex(vertex(0)) < 0) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ /**
+ * Return true if the region contained by this loop intersects the region
+ * contained by the given other loop.
+ */
+ public boolean intersects(S2Loop b) {
+ // a->Intersects(b) if and only if !a->Complement()->Contains(b).
+ // This code is similar to Contains(), but is optimized for the case
+ // where both loops enclose less than half of the sphere.
+
+ if (!bound.intersects(b.getRectBound())) {
+ return false;
+ }
+
+ // Normalize the arguments so that B has a smaller longitude span than A.
+ // This makes intersection tests much more efficient in the case where
+ // longitude pruning is used (see CheckEdgeCrossings).
+ if (b.getRectBound().lng().getLength() > bound.lng().getLength()) {
+ return b.intersects(this);
+ }
+
+ // Unless there are shared vertices, we need to check whether A contains a
+ // vertex of B.
Since shared vertices are rare, it is more efficient to do
+ // this test up front as a quick acceptance test.
+ if (contains(b.vertex(0)) && findVertex(b.vertex(0)) < 0) {
+ return true;
+ }
+
+ // Now check whether there are any edge crossings, and also check the loop
+ // relationship at any shared vertices.
+ if (checkEdgeCrossings(b, new S2EdgeUtil.WedgeIntersects()) < 0) {
+ return true;
+ }
+
+ // We know that A does not contain a vertex of B, and that there are no edge
+ // crossings. Therefore the only way that A can intersect B is if B
+ // entirely contains A. We can check this by testing whether B contains an
+ // arbitrary non-shared vertex of A. Note that this check is cheap because
+ // of the bounding box precondition and the fact that we normalized the
+ // arguments so that A's longitude span is at least as long as B's.
+ if (b.getRectBound().contains(bound)) {
+ if (b.contains(vertex(0)) && b.findVertex(vertex(0)) < 0) {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ /**
+ * Given two loops of a polygon, return true if A contains B. This version of
+ * contains() is much cheaper since it does not need to check whether the
+ * boundaries of the two loops cross.
+ */
+ public boolean containsNested(S2Loop b) {
+ if (!bound.contains(b.getRectBound())) {
+ return false;
+ }
+
+ // We are given that A and B do not share any edges, and that either one
+ // loop contains the other or they do not intersect.
+ int m = findVertex(b.vertex(1));
+ if (m < 0) {
+ // Since b->vertex(1) is not shared, we can check whether A contains it.
+ return contains(b.vertex(1));
+ }
+ // Check whether the edge order around b->vertex(1) is compatible with
+ // A containing B.
+ return (new S2EdgeUtil.WedgeContains()).test(
+ vertex(m - 1), vertex(m), vertex(m + 1), b.vertex(0), b.vertex(2)) > 0;
+ }
+
+ /**
+ * Return +1 if A contains B (i.e. the interior of B is a subset of the
+ * interior of A), -1 if the boundaries of A and B cross, and 0 otherwise.
+ * Requires that A does not properly contain the complement of B, i.e. A and B
+ * do not contain each other's boundaries. This method is used for testing
+ * whether multi-loop polygons contain each other.
+ */
+ public int containsOrCrosses(S2Loop b) {
+ // There can be containment or crossing only if the bounds intersect.
+ if (!bound.intersects(b.getRectBound())) {
+ return 0;
+ }
+
+ // Now check whether there are any edge crossings, and also check the loop
+ // relationship at any shared vertices. Note that unlike Contains() or
+ // Intersects(), we can't do a point containment test as a shortcut because
+ // we need to detect whether there are any edge crossings.
+ int result = checkEdgeCrossings(b, new S2EdgeUtil.WedgeContainsOrCrosses());
+
+ // If there was an edge crossing or a shared vertex, we know the result
+ // already. (This is true even if the result is 1, but since we don't
+ // bother keeping track of whether a shared vertex was seen, we handle this
+ // case below.)
+ if (result <= 0) {
+ return result;
+ }
+
+ // At this point we know that the boundaries do not intersect, and we are
+ // given that (A union B) is a proper subset of the sphere. Furthermore
+ // either A contains B, or there are no shared vertices (due to the check
+ // above). So now we just need to distinguish the case where A contains B
+ // from the case where B contains A or the two loops are disjoint.
+ if (!bound.contains(b.getRectBound())) {
+ return 0;
+ }
+ if (!contains(b.vertex(0)) && findVertex(b.vertex(0)) < 0) {
+ return 0;
+ }
+
+ return 1;
+ }
+
+ /**
+ * Returns true if two loops have the same boundary except for vertex
+ * perturbations. More precisely, the vertices in the two loops must be in the
+ * same cyclic order, and corresponding vertex pairs must be separated by no
+ * more than maxError. Note: This method is mostly useful only for testing
+ * purposes.
+ */
+ boolean boundaryApproxEquals(S2Loop b, double maxError) {
+ if (numVertices() != b.numVertices()) {
+ return false;
+ }
+ int maxVertices = numVertices();
+ int iThis = firstLogicalVertex;
+ int iOther = b.firstLogicalVertex;
+ for (int i = 0; i < maxVertices; ++i, ++iThis, ++iOther) {
+ if (!S2.approxEquals(vertex(iThis), b.vertex(iOther), maxError)) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ // S2Region interface (see {@code S2Region} for details):
+
+ /** Return a bounding spherical cap. */
+ @Override
+ public S2Cap getCapBound() {
+ return bound.getCapBound();
+ }
+
+
+ /** Return a bounding latitude-longitude rectangle. */
+ @Override
+ public S2LatLngRect getRectBound() {
+ return bound;
+ }
+
+ /**
+ * If this method returns true, the region completely contains the given cell.
+ * Otherwise, either the region does not contain the cell or the containment
+ * relationship could not be determined.
+ */
+ @Override
+ public boolean contains(S2Cell cell) {
+ // It is faster to construct a bounding rectangle for an S2Cell than for
+ // a general polygon. A future optimization could also take advantage of
+ // the fact that an S2Cell is convex.
+
+ S2LatLngRect cellBound = cell.getRectBound();
+ if (!bound.contains(cellBound)) {
+ return false;
+ }
+ S2Loop cellLoop = new S2Loop(cell, cellBound);
+ return contains(cellLoop);
+ }
+
+ /**
+ * If this method returns false, the region does not intersect the given cell.
+ * Otherwise, either region intersects the cell, or the intersection
+ * relationship could not be determined.
+ */
+ @Override
+ public boolean mayIntersect(S2Cell cell) {
+ // It is faster to construct a bounding rectangle for an S2Cell than for
+ // a general polygon. A future optimization could also take advantage of
+ // the fact that an S2Cell is convex.
+ + S2LatLngRect cellBound = cell.getRectBound(); + if (!bound.intersects(cellBound)) { + return false; + } + return new S2Loop(cell, cellBound).intersects(this); + } + + /** + * The point 'p' does not need to be normalized. + */ + public boolean contains(S2Point p) { + if (!bound.contains(p)) { + return false; + } + + boolean inside = originInside; + S2Point origin = S2.origin(); + S2EdgeUtil.EdgeCrosser crosser = new S2EdgeUtil.EdgeCrosser(origin, p, vertex(0)); + + for (int i = 1; i <= numVertices(); ++i) { + inside ^= crosser.edgeOrVertexCrossing(vertex(i)); + } + return inside; + } + + /** + * Returns the shortest distance from a point P to this loop, given as the + * angle formed between P, the origin and the nearest point on the loop to P. + * This angle in radians is equivalent to the arclength along the unit sphere. + */ + public S1Angle getDistance(S2Point p) { + S2Point normalized = S2Point.normalize(p); + + // The furthest point from p on the sphere is its antipode, which is an + // angle of PI radians. This is an upper bound on the angle. + S1Angle minDistance = S1Angle.radians(Math.PI); + for (int i = 0; i < numVertices(); i++) { + minDistance = + S1Angle.min(minDistance, S2EdgeUtil.getDistance(normalized, vertex(i), vertex(i + 1))); + } + return minDistance; + } + + /** + * Creates an edge index over the given vertices, and if we predict sufficient + * calls to lookup edges in the index, then the indexing will actually happen + * before this method returns, otherwise index lookups will simply check every + * edge. + * + * @return an edge index over the given vertices. 
+ */ + private static final S2EdgeIndex index(final List<S2Point> vertex, int numCalls) { + S2EdgeIndex edgeIndex = new S2EdgeIndex() { + int numVertices = vertex.size(); + @Override + protected int getNumEdges() { + return numVertices; + } + + @Override + protected S2Point edgeFrom(int index) { + return vertex.get(index); + } + + @Override + protected S2Point edgeTo(int index) { + return vertex.get((index + 1) % numVertices); + } + }; + edgeIndex.predictAdditionalCalls(numCalls); + return edgeIndex; + } + + /** Return true if the given vertices form a valid loop. */ + public static boolean isValid(final List<S2Point> vertices) { + // Loops must have at least 3 vertices. + final int numVertices = vertices.size(); + if (numVertices < 3) { + log.info("Degenerate loop"); + return false; + } + + // All vertices must be unit length. + for (int i = 0; i < numVertices; ++i) { + if (!S2.isUnitLength(vertices.get(i))) { + log.info("Vertex " + i + " is not unit length"); + return false; + } + } + + // Loops are not allowed to have any duplicate vertices. + HashMap<S2Point, Integer> vmap = Maps.newHashMap(); + for (int i = 0; i < numVertices; ++i) { + Integer previousVertexIndex = vmap.put(vertices.get(i), i); + if (previousVertexIndex != null) { + log.info("Duplicate vertices: " + previousVertexIndex + " and " + i); + return false; + } + } + + // Non-adjacent edges are not allowed to intersect. + boolean crosses = false; + S2EdgeIndex.DataEdgeIterator it = new S2EdgeIndex.DataEdgeIterator( + index(vertices, numVertices)); + for (int a1 = 0; a1 < numVertices; a1++) { + int a2 = (a1 + 1) % numVertices; + EdgeCrosser crosser = new EdgeCrosser(vertices.get(a1), vertices.get(a2), vertices.get(0)); + int previousIndex = -2; + for (it.getCandidates(vertices.get(a1), vertices.get(a2)); it.hasNext(); it.next()) { + int b1 = it.index(); + int b2 = (b1 + 1) % numVertices; + // If either 'a' index equals either 'b' index, then these two edges + // share a vertex. 
If a1==b1 then it must be the case that a2==b2, e.g. + // the two edges are the same. In that case, we skip the test, since we + // don't want to test an edge against itself. If a1==b2 or b1==a2 then + // we have one edge ending at the start of the other, or in other words, + // the edges share a vertex -- and in S2 space, where edges are always + // great circle segments on a sphere, edges can only intersect at most + // once, so we don't need to do further checks in that case either. + if (a1 != b2 && a2 != b1 && a1 != b1) { + // WORKAROUND(shakusa, ericv): S2.robustCCW() currently + // requires arbitrary-precision arithmetic to be truly robust. That + // means it can give the wrong answers in cases where we are trying + // to determine edge intersections. The workaround is to ignore + // intersections between edge pairs where all four points are + // nearly colinear. + double abc = S2.angle(vertices.get(a1), vertices.get(a2), vertices.get(b1)); + boolean abcNearlyLinear = S2.approxEquals(abc, 0D, MAX_INTERSECTION_ERROR) || + S2.approxEquals(abc, S2.M_PI, MAX_INTERSECTION_ERROR); + double abd = S2.angle(vertices.get(a1), vertices.get(a2), vertices.get(b2)); + boolean abdNearlyLinear = S2.approxEquals(abd, 0D, MAX_INTERSECTION_ERROR) || + S2.approxEquals(abd, S2.M_PI, MAX_INTERSECTION_ERROR); + if (abcNearlyLinear && abdNearlyLinear) { + continue; + } + + if (previousIndex != b1) { + crosser.restartAt(vertices.get(b1)); + } + + // Beware, this may return the loop is valid if there is a + // "vertex crossing". + // TODO(user): Fix that. 
+ crosses = crosser.robustCrossing(vertices.get(b2)) > 0; + previousIndex = b2; + if (crosses ) { + log.info("Edges " + a1 + " and " + b1 + " cross"); + log.info(String.format("Edge locations in degrees: " + "%s-%s and %s-%s", + new S2LatLng(vertices.get(a1)).toStringDegrees(), + new S2LatLng(vertices.get(a2)).toStringDegrees(), + new S2LatLng(vertices.get(b1)).toStringDegrees(), + new S2LatLng(vertices.get(b2)).toStringDegrees())); + return false; + } + } + } + } + + return true; + } + + @Override + public String toString() { + StringBuilder builder = new StringBuilder("S2Loop, "); + + builder.append(vertices.length).append(" points. ["); + + for (S2Point v : vertices) { + builder.append(v.toString()).append(" "); + } + builder.append("]"); + + return builder.toString(); + } + + private void initOrigin() { + // The bounding box does not need to be correct before calling this + // function, but it must at least contain vertex(1) since we need to + // do a Contains() test on this point below. + Preconditions.checkState(bound.contains(vertex(1))); + + // To ensure that every point is contained in exactly one face of a + // subdivision of the sphere, all containment tests are done by counting the + // edge crossings starting at a fixed point on the sphere (S2::Origin()). + // We need to know whether this point is inside or outside of the loop. + // We do this by first guessing that it is outside, and then seeing whether + // we get the correct containment result for vertex 1. If the result is + // incorrect, the origin must be inside the loop. + // + // A loop with consecutive vertices A,B,C contains vertex B if and only if + // the fixed vector R = S2::Ortho(B) is on the left side of the wedge ABC. + // The test below is written so that B is inside if C=R but not if A=R. + + originInside = false; // Initialize before calling Contains(). 
+ boolean v1Inside = S2.orderedCCW(S2.ortho(vertex(1)), vertex(0), vertex(2), vertex(1)); + if (v1Inside != contains(vertex(1))) { + originInside = true; + } + } + + private void initBound() { + // The bounding rectangle of a loop is not necessarily the same as the + // bounding rectangle of its vertices. First, the loop may wrap entirely + // around the sphere (e.g. a loop that defines two revolutions of a + // candy-cane stripe). Second, the loop may include one or both poles. + // Note that a small clockwise loop near the equator contains both poles. + + S2EdgeUtil.RectBounder bounder = new S2EdgeUtil.RectBounder(); + for (int i = 0; i <= numVertices(); ++i) { + bounder.addPoint(vertex(i)); + } + S2LatLngRect b = bounder.getBound(); + // Note that we need to initialize bound with a temporary value since + // contains() does a bounding rectangle check before doing anything else. + bound = S2LatLngRect.full(); + if (contains(new S2Point(0, 0, 1))) { + b = new S2LatLngRect(new R1Interval(b.lat().lo(), S2.M_PI_2), S1Interval.full()); + } + // If a loop contains the south pole, then either it wraps entirely + // around the sphere (full longitude range), or it also contains the + // north pole in which case b.lng().isFull() due to the test above. + + if (b.lng().isFull() && contains(new S2Point(0, 0, -1))) { + b.lat().setLo(-S2.M_PI_2); + } + bound = b; + } + + private void initFromCell(S2Cell cell) { + numVertices = 4; + vertices = new S2Point[numVertices]; + depth = 0; + for (int i = 0; i < 4; ++i) { + vertices[i] = cell.getVertex(i); + } + initOrigin(); + initFirstLogicalVertex(); + } + + /** + * Return the index of a vertex at point "p", or -1 if not found. The return + * value is in the range 1..num_vertices_ if found. 
+ */ + private int findVertex(S2Point p) { + for (int i = 1; i <= numVertices(); ++i) { + if (vertex(i).equals(p)) { + return i; + } + } + return -1; + } + + /** + * This method encapsulates the common code for loop containment and + * intersection tests. It is used in three slightly different variations to + * implement contains(), intersects(), and containsOrCrosses(). + * + * In a nutshell, this method checks all the edges of this polygon (A) for + * intersection with all the edges of B. It returns -1 immediately if any edge + * intersections are found. Otherwise, if there are any shared vertices, it + * returns the minimum value of the given WedgeRelation for all such vertices + * (returning immediately if any wedge returns -1). Returns +1 if there are no + * intersections and no shared vertices. + */ + private int checkEdgeCrossings(S2Loop b, S2EdgeUtil.WedgeRelation relation) { + if (b.numVertices() <= 6) { + int result = 1; + // For small loops (such as S2Cell boundaries), it is not worth computing + // longitude bounds to avoid edge tests, and it is more efficient to + // reverse the loop nesting. + for (int j = 0; j < b.numVertices(); ++j) { + S2EdgeUtil.EdgeCrosser crosser = + new S2EdgeUtil.EdgeCrosser(b.vertex(j), b.vertex(j + 1), vertex(0)); + for (int i = 0; i < numVertices(); ++i) { + int crossing = crosser.robustCrossing(vertex(i + 1)); + if (crossing < 0) { + continue; + } + if (crossing > 0) { + return -1; // There is a proper edge crossing. + } + // We only need to check each shared vertex once, so we only + // consider the case where vertex(i+1) == b->vertex(j+1). 
+ if (vertex(i + 1).equals(b.vertex(j + 1))) { + result = Math.min(result, relation.test( + vertex(i), vertex(i + 1), vertex(i + 2), b.vertex(j), b.vertex(j + 2))); + + if (result < 0) { + return result; + } + } + } + } + return result; + } + // For larger loops, we can save a lot of edge tests by first checking + // whether each edge of the outer loop intersects the longitude interval + // of the entire inner loop. + int result = 1; + S2EdgeUtil.LongitudePruner pruner = + new S2EdgeUtil.LongitudePruner(b.getRectBound().lng(), vertex(0)); + for (int i = 0; i < numVertices(); ++i) { + if (!pruner.intersects(vertex(i + 1))) { + continue; + } + S2EdgeUtil.EdgeCrosser crosser = + new S2EdgeUtil.EdgeCrosser(vertex(i), vertex(i + 1), b.vertex(0)); + for (int j = 0; j < b.numVertices(); ++j) { + int crossing = crosser.robustCrossing(b.vertex(j + 1)); + if (crossing < 0) { + continue; + } + if (crossing > 0) { + return -1; // There is a proper edge crossing. + } + if (vertex(i + 1).equals(b.vertex(j + 1))) { + result = Math.min(result, relation.test( + vertex(i), vertex(i + 1), vertex(i + 2), b.vertex(j), b.vertex(j + 2))); + if (result < 0) { + return result; + } + } + } + } + return result; + } +} diff --git a/src/com/google/common/geometry/S2Point.java b/src/com/google/common/geometry/S2Point.java new file mode 100644 index 0000000..89a2322 --- /dev/null +++ b/src/com/google/common/geometry/S2Point.java @@ -0,0 +1,200 @@ +/* + * Copyright 2006 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.common.geometry; + +/** + * An S2Point represents a point on the unit sphere as a 3D vector. Usually + * points are normalized to be unit length, but some methods do not require + * this. + * + */ +public strictfp class S2Point implements Comparable<S2Point> { + // coordinates of the points + final double x; + final double y; + final double z; + + public S2Point() { + x = y = z = 0; + } + + public S2Point(double x, double y, double z) { + this.x = x; + this.y = y; + this.z = z; + } + + public static S2Point minus(S2Point p1, S2Point p2) { + return sub(p1, p2); + } + + public static S2Point neg(S2Point p) { + return new S2Point(-p.x, -p.y, -p.z); + } + + public double norm2() { + return x * x + y * y + z * z; + } + + public double norm() { + return Math.sqrt(norm2()); + } + + public static S2Point crossProd(final S2Point p1, final S2Point p2) { + return new S2Point( + p1.y * p2.z - p1.z * p2.y, p1.z * p2.x - p1.x * p2.z, p1.x * p2.y - p1.y * p2.x); + } + + public static S2Point add(final S2Point p1, final S2Point p2) { + return new S2Point(p1.x + p2.x, p1.y + p2.y, p1.z + p2.z); + } + + public static S2Point sub(final S2Point p1, final S2Point p2) { + return new S2Point(p1.x - p2.x, p1.y - p2.y, p1.z - p2.z); + } + + public double dotProd(S2Point that) { + return this.x * that.x + this.y * that.y + this.z * that.z; + } + + public static S2Point mul(final S2Point p, double m) { + return new S2Point(m * p.x, m * p.y, m * p.z); + } + + public static S2Point div(final S2Point p, double m) { + return new S2Point(p.x / m, p.y / m, p.z / m); + } + + /** return a vector orthogonal to this one */ + public S2Point ortho() { + int k = largestAbsComponent(); + S2Point temp; + if (k == 1) { + temp = new S2Point(1, 0, 0); + } else if (k == 2) { + temp = new S2Point(0, 1, 0); + } else { + temp = new S2Point(0, 0, 1); + } + return 
S2Point.normalize(crossProd(this, temp)); + } + + /** Return the index of the largest component fabs */ + public int largestAbsComponent() { + S2Point temp = fabs(this); + if (temp.x > temp.y) { + if (temp.x > temp.z) { + return 0; + } else { + return 2; + } + } else { + if (temp.y > temp.z) { + return 1; + } else { + return 2; + } + } + } + + public static S2Point fabs(S2Point p) { + return new S2Point(Math.abs(p.x), Math.abs(p.y), Math.abs(p.z)); + } + + public static S2Point normalize(S2Point p) { + double norm = p.norm(); + if (norm != 0) { + norm = 1.0 / norm; + } + return S2Point.mul(p, norm); + } + + public double get(int axis) { + return (axis == 0) ? x : (axis == 1) ? y : z; + } + + /** Return the angle between two vectors in radians */ + public double angle(S2Point va) { + return Math.atan2(crossProd(this, va).norm(), this.dotProd(va)); + } + + /** + * Compare two vectors, return true if all their components are within a + * difference of margin. + */ + boolean aequal(S2Point that, double margin) { + return (Math.abs(x - that.x) < margin) && (Math.abs(y - that.y) < margin) + && (Math.abs(z - that.z) < margin); + } + + @Override + public boolean equals(Object that) { + if (!(that instanceof S2Point)) { + return false; + } + S2Point thatPoint = (S2Point) that; + return this.x == thatPoint.x && this.y == thatPoint.y && this.z == thatPoint.z; + } + + public boolean lessThan(S2Point vb) { + if (x < vb.x) { + return true; + } + if (vb.x < x) { + return false; + } + if (y < vb.y) { + return true; + } + if (vb.y < y) { + return false; + } + if (z < vb.z) { + return true; + } + return false; + } + + // Required for Comparable + @Override + public int compareTo(S2Point other) { + return (lessThan(other) ? -1 : (equals(other) ? 
0 : 1));
+ }
+
+ @Override
+ public String toString() {
+ return "(" + x + ", " + y + ", " + z + ")";
+ }
+
+ public String toDegreesString() {
+ S2LatLng s2LatLng = new S2LatLng(this);
+ return "(" + Double.toString(s2LatLng.latDegrees()) + ", "
+ + Double.toString(s2LatLng.lngDegrees()) + ")";
+ }
+
+ /**
+ * Calculates hashcode based on stored coordinates. Since we want +0.0 and
+ * -0.0 to be treated the same, we ignore the sign of the coordinates.
+ */
+ @Override
+ public int hashCode() {
+ long value = 17;
+ value += 37 * value + Double.doubleToLongBits(Math.abs(x));
+ value += 37 * value + Double.doubleToLongBits(Math.abs(y));
+ value += 37 * value + Double.doubleToLongBits(Math.abs(z));
+ return (int) (value ^ (value >>> 32));
+ }
+}
diff --git a/src/com/google/common/geometry/S2Polygon.java b/src/com/google/common/geometry/S2Polygon.java
new file mode 100644
index 0000000..0ae8492
--- /dev/null
+++ b/src/com/google/common/geometry/S2Polygon.java
@@ -0,0 +1,1175 @@
+/*
+ * Copyright 2006 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package com.google.common.geometry; + +import com.google.common.base.Preconditions; +import com.google.common.collect.HashMultiset; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.common.collect.Multiset; +import com.google.common.collect.TreeMultimap; +import com.google.common.collect.TreeMultiset; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.logging.Logger; + +/** + * An S2Polygon is an S2Region object that represents a polygon. A polygon + * consists of zero or more {@link S2Loop loops} representing "shells" and + * "holes". All loops should be oriented CCW, i.e. the shell or hole is on the + * left side of the loop. Loops may be specified in any order. A point is + * defined to be inside the polygon if it is contained by an odd number of + * loops. + * + * Polygons have the following restrictions: + * + * - Loops may not cross, i.e. the boundary of a loop may not intersect both + * the interior and exterior of any other loop. + * + * - Loops may not share edges, i.e. if a loop contains an edge AB, then no + * other loop may contain AB or BA. + * + * - No loop may cover more than half the area of the sphere. This ensures that + * no loop properly contains the complement of any other loop, even if the loops + * are from different polygons. (Loops that represent exact hemispheres are + * allowed.) + * + * Loops may share vertices, however no vertex may appear twice in a single + * loop. + * + */ +public strictfp class S2Polygon implements S2Region, Comparable<S2Polygon> { + private static final Logger log = Logger.getLogger(S2Polygon.class.getCanonicalName()); + + private List<S2Loop> loops; + + private S2LatLngRect bound; + private boolean hasHoles; + private int numVertices; + + // TODO(kirilll): Get rid of debug mode. Turn it into tests. 
Should the debug
+ // mode be set to false by default, anyways?
+ public static boolean DEBUG = true;
+
+ /**
+ * Creates an empty polygon that should be initialized by calling Init().
+ */
+ public S2Polygon() {
+ this.loops = Lists.newArrayList();
+ this.bound = S2LatLngRect.empty();
+ this.hasHoles = false;
+ this.numVertices = 0;
+ }
+
+ /**
+ * Convenience constructor that calls Init() with the given loops. Clears the
+ * given list.
+ */
+ public S2Polygon(List<S2Loop> loops) {
+ this.loops = Lists.newArrayList();
+ this.bound = S2LatLngRect.empty();
+
+ init(loops);
+ }
+
+ /**
+ * Copy constructor.
+ */
+ public S2Polygon(S2Loop loop) {
+ this.loops = Lists.newArrayList();
+ this.bound = loop.getRectBound();
+ this.hasHoles = false;
+ this.numVertices = loop.numVertices();
+
+ loops.add(loop);
+ }
+
+ /**
+ * Copy constructor.
+ */
+ public S2Polygon(S2Polygon src) {
+ this.loops = Lists.newArrayList();
+ this.bound = src.getRectBound();
+ this.hasHoles = src.hasHoles;
+ this.numVertices = src.numVertices;
+
+ for (int i = 0; i < src.numLoops(); ++i) {
+ loops.add(new S2Loop(src.loop(i)));
+ }
+ }
+
+ /**
+ * Comparator (needed by Comparable interface). For two polygons to be
+ * compared as equal: - they must have the same number of loops; - the loops
+ * must be ordered in the same way (this is guaranteed by the total ordering
+ * imposed by sortValueLoops). - loops must be logically equivalent (even if
+ * ordered with a different starting point, e.g. ABCD and BCDA).
+ */
+ @Override
+ public int compareTo(S2Polygon other) {
+ // If number of loops differ, use that.
+ if (this.numLoops() != other.numLoops()) {
+ return this.numLoops() - other.numLoops();
+ }
+ for (int i = 0; i < this.numLoops(); ++i) {
+ int compare = this.loops.get(i).compareTo(other.loops.get(i));
+ if (compare != 0) {
+ return compare;
+ }
+ }
+ return 0;
+ }
+
+ /**
+ * Initialize a polygon by taking ownership of the given loops and clearing
+ * the given list.
This method figures out the loop nesting hierarchy and then + * reorders the loops by following a preorder traversal. This implies that + * each loop is immediately followed by its descendants in the nesting + * hierarchy. (See also getParent and getLastDescendant.) + */ + public void init(List<S2Loop> loops) { + if (DEBUG) { + // assert (isValid(loops)); + } + // assert (this.loops.isEmpty()); + + Map<S2Loop, List<S2Loop>> loopMap = Maps.newHashMap(); + // Yes, a null key is valid. It is used here to refer to the root of the + // loopMap + loopMap.put(null, Lists.<S2Loop>newArrayList()); + + for (S2Loop loop : loops) { + insertLoop(loop, null, loopMap); + this.numVertices += loop.numVertices(); + } + loops.clear(); + + // Sort all of the lists of loops; in this way we guarantee a total ordering + // on loops in the polygon. Loops will be sorted by their natural ordering, + // while also preserving the requirement that each loop is immediately + // followed by its descendants in the nesting hierarchy. + // + // TODO(andriy): as per kirilll in CL 18750833 code review comments: + // This should work for now, but I think it's possible to guarantee the + // correct order inside insertLoop by searching for the correct position in + // the children list before inserting. + sortValueLoops(loopMap); + + // Reorder the loops in depth-first traversal order. + // Starting at null == starting at the root + initLoop(null, -1, loopMap); + + if (DEBUG) { + // Check that the LoopMap is correct (this is fairly cheap). + for (int i = 0; i < numLoops(); ++i) { + for (int j = 0; j < numLoops(); ++j) { + if (i == j) { + continue; + } + // assert (containsChild(loop(i), loop(j), loopMap) == loop(i).containsNested(loop(j))); + } + } + } + + // Compute the bounding rectangle of the entire polygon. 
+
+ hasHoles = false;
+ bound = S2LatLngRect.empty();
+ for (int i = 0; i < numLoops(); ++i) {
+ if (loop(i).sign() < 0) {
+ hasHoles = true;
+ } else {
+ bound = bound.union(loop(i).getRectBound());
+ }
+ }
+ }
+
+ /**
+ * Release ownership of the loops of this polygon by appending them to the
+ * given list. Resets the polygon to be empty.
+ */
+ public void release(List<S2Loop> loops) {
+ loops.addAll(this.loops);
+ this.loops.clear();
+ bound = S2LatLngRect.empty();
+ hasHoles = false;
+ numVertices = 0;
+ }
+
+ /**
+ * Return true if the given loops form a valid polygon. Assumes that all
+ * of the given loops have already been validated.
+ */
+ public static boolean isValid(final List<S2Loop> loops) {
+ // If a loop contains an edge AB, then no other loop may contain AB or BA.
+ // We only need this test if there are at least two loops, assuming that
+ // each loop has already been validated.
+ if (loops.size() > 1) {
+ Map<UndirectedEdge, LoopVertexIndexPair> edges = Maps.newHashMap();
+ for (int i = 0; i < loops.size(); ++i) {
+ S2Loop lp = loops.get(i);
+ for (int j = 0; j < lp.numVertices(); ++j) {
+ UndirectedEdge key = new UndirectedEdge(lp.vertex(j), lp.vertex(j + 1));
+ LoopVertexIndexPair value = new LoopVertexIndexPair(i, j);
+ if (edges.containsKey(key)) {
+ LoopVertexIndexPair other = edges.get(key);
+ log.info(
+ "Duplicate edge: loop " + i + ", edge " + j + " and loop " + other.getLoopIndex()
+ + ", edge " + other.getVertexIndex());
+ return false;
+ } else {
+ edges.put(key, value);
+ }
+ }
+ }
+ }
+
+ // Verify that no loop covers more than half of the sphere, and that no
+ // two loops cross.
+ for (int i = 0; i < loops.size(); ++i) {
+ if (!loops.get(i).isNormalized()) {
+ log.info("Loop " + i + " encloses more than half the sphere");
+ return false;
+ }
+ for (int j = i + 1; j < loops.size(); ++j) {
+ // This test not only checks for edge crossings, it also detects
+ // cases where the two boundaries cross at a shared vertex.
+ if (loops.get(i).containsOrCrosses(loops.get(j)) < 0) { + log.info("Loop " + i + " crosses loop " + j); + return false; + } + } + } + return true; + } + + public int numLoops() { + return loops.size(); + } + + public S2Loop loop(int k) { + return loops.get(k); + } + + /** + * Return the index of the parent of loop k, or -1 if it has no parent. + */ + public int getParent(int k) { + int depth = loop(k).depth(); + if (depth == 0) { + return -1; // Optimization. + } + while (--k >= 0 && loop(k).depth() >= depth) { + // spin + } + return k; + } + + /** + * Return the index of the last loop that is contained within loop k. Returns + * num_loops() - 1 if k < 0. Note that loops are indexed according to a + * preorder traversal of the nesting hierarchy, so the immediate children of + * loop k can be found by iterating over loops (k+1)..getLastDescendant(k) and + * selecting those whose depth is equal to (loop(k).depth() + 1). + */ + public int getLastDescendant(int k) { + if (k < 0) { + return numLoops() - 1; + } + int depth = loop(k).depth(); + while (++k < numLoops() && loop(k).depth() > depth) { + // spin + } + return k - 1; + } + + private S2AreaCentroid getAreaCentroid(boolean doCentroid) { + double areaSum = 0; + S2Point centroidSum = new S2Point(0, 0, 0); + for (int i = 0; i < numLoops(); ++i) { + S2AreaCentroid areaCentroid = doCentroid ? loop(i).getAreaAndCentroid() : null; + double loopArea = doCentroid ? areaCentroid.getArea() : loop(i).getArea(); + + int loopSign = loop(i).sign(); + areaSum += loopSign * loopArea; + if (doCentroid) { + S2Point currentCentroid = areaCentroid.getCentroid(); + centroidSum = + new S2Point(centroidSum.x + loopSign * currentCentroid.x, + centroidSum.y + loopSign * currentCentroid.y, + centroidSum.z + loopSign * currentCentroid.z); + } + } + + return new S2AreaCentroid(areaSum, doCentroid ? centroidSum : null); + } + + /** + * Return the area of the polygon interior, i.e. 
the region on the left side + * of an odd number of loops (this value return value is between 0 and 4*Pi) + * and the true centroid of the polygon multiplied by the area of the polygon + * (see s2.h for details on centroids). Note that the centroid may not be + * contained by the polygon. + */ + public S2AreaCentroid getAreaAndCentroid() { + return getAreaCentroid(true); + } + + /** + * Return the area of the polygon interior, i.e. the region on the left side + * of an odd number of loops. The return value is between 0 and 4*Pi. + */ + public double getArea() { + return getAreaCentroid(false).getArea(); + } + + /** + * Return the true centroid of the polygon multiplied by the area of the + * polygon (see s2.h for details on centroids). Note that the centroid may not + * be contained by the polygon. + */ + public S2Point getCentroid() { + return getAreaCentroid(true).getCentroid(); + } + + /** + * Returns the shortest distance from a point P to this polygon, given as the + * angle formed between P, the origin and the nearest point on the polygon to + * P. This angle in radians is equivalent to the arclength along the unit + * sphere. + * + * If the point is contained inside the polygon, the distance returned is 0. + */ + public S1Angle getDistance(S2Point p) { + if (contains(p)) { + return S1Angle.radians(0); + } + + // The furthest point from p on the sphere is its antipode, which is an + // angle of PI radians. This is an upper bound on the angle. + S1Angle minDistance = S1Angle.radians(Math.PI); + for (int i = 0; i < numLoops(); i++) { + minDistance = S1Angle.min(minDistance, loop(i).getDistance(p)); + } + + return minDistance; + } + + + /** + * Return true if this polygon contains the given other polygon, i.e. if + * polygon A contains all points contained by polygon B. + */ + public boolean contains(S2Polygon b) { + // If both polygons have one loop, use the more efficient S2Loop method. + // Note that S2Loop.contains does its own bounding rectangle check. 
+ if (numLoops() == 1 && b.numLoops() == 1) { + return loop(0).contains(b.loop(0)); + } + + // Otherwise if neither polygon has holes, we can still use the more + // efficient S2Loop::Contains method (rather than ContainsOrCrosses), + // but it's worthwhile to do our own bounds check first. + if (!bound.contains(b.getRectBound())) { + // If the union of the bounding boxes spans the full longitude range, + // it is still possible that polygon A contains B. (This is only + // possible if at least one polygon has multiple shells.) + if (!bound.lng().union(b.getRectBound().lng()).isFull()) { + return false; + } + } + if (!hasHoles && !b.hasHoles) { + for (int j = 0; j < b.numLoops(); ++j) { + if (!anyLoopContains(b.loop(j))) { + return false; + } + } + return true; + } + + // This could be implemented more efficiently for polygons with lots of + // holes by keeping a copy of the LoopMap computed during initialization. + // However, in practice most polygons are one loop, and multiloop polygons + // tend to consist of many shells rather than holes. In any case, the real + // way to get more efficiency is to implement a sub-quadratic algorithm + // such as building a trapezoidal map. + + // Every shell of B must be contained by an odd number of loops of A, + // and every hole of A must be contained by an even number of loops of B. + return containsAllShells(b) && b.excludesAllHoles(this); + } + + /** + * Return true if this polygon intersects the given other polygon, i.e. if + * there is a point that is contained by both polygons. + */ + public boolean intersects(S2Polygon b) { + // A.intersects(B) if and only if !complement(A).contains(B). However, + // implementing a complement() operation is trickier than it sounds, + // and in any case it's more efficient to test for intersection directly. + + // If both polygons have one loop, use the more efficient S2Loop method. + // Note that S2Loop.intersects does its own bounding rectangle check. 
+ if (numLoops() == 1 && b.numLoops() == 1) { + return loop(0).intersects(b.loop(0)); + } + + // Otherwise if neither polygon has holes, we can still use the more + // efficient S2Loop.intersects method. The polygons intersect if and + // only if some pair of loop regions intersect. + if (!bound.intersects(b.getRectBound())) { + return false; + } + if (!hasHoles && !b.hasHoles) { + for (int i = 0; i < numLoops(); ++i) { + for (int j = 0; j < b.numLoops(); ++j) { + if (loop(i).intersects(b.loop(j))) { + return true; + } + } + } + return false; + } + + // Otherwise if any shell of B is contained by an odd number of loops of A, + // or any shell of A is contained by an odd number of loops of B, there is + // an intersection. + return intersectsAnyShell(b) || b.intersectsAnyShell(this); + } + + /** + * Indexing structure to efficiently clipEdge() of a polygon. This is an + * abstract class because we need to use if for both polygons (for + * initToIntersection() and friends) and for sets of lists of points (for + * initToSimplified()). + * + * Usage -- in your subclass: - Call addLoop() for each of your loops -- and + * keep them accessible in your subclass. - Overwrite edgeFromTo(), calling + * decodeIndex() and accessing your underlying data with the resulting two + * indices. 
+ */ + private abstract static class S2LoopSequenceIndex extends S2EdgeIndex { + public S2LoopSequenceIndex() { + numEdges = 0; + numLoops = 0; + indexToLoop = Lists.newArrayList(); + loopToFirstIndex = Lists.newArrayList(); + } + + public void addLoop(int numVertices) { + int verticesSoFar = numEdges; + loopToFirstIndex.add(verticesSoFar); + indexToLoop.ensureCapacity(verticesSoFar + numVertices); + for (int i = 0; i < numVertices; ++i) { + indexToLoop.add(verticesSoFar, numLoops); + ++verticesSoFar; + } + numEdges += numVertices; + ++numLoops; + } + + public LoopVertexIndexPair decodeIndex(int index) { + int loopIndex = indexToLoop.get(index); + int vertexInLoop = index - loopToFirstIndex.get(loopIndex); + return new LoopVertexIndexPair(loopIndex, vertexInLoop); + } + + // It is faster to return both vertices at once. It makes a difference + // for small polygons. + public abstract S2Edge edgeFromTo(int index); + + @Override + protected int getNumEdges() { + return numEdges; + } + + @Override + public S2Point edgeFrom(int index) { + S2Edge fromTo = edgeFromTo(index); + S2Point from = fromTo.getStart(); + return from; + } + + @Override + protected S2Point edgeTo(int index) { + S2Edge fromTo = edgeFromTo(index); + S2Point to = fromTo.getEnd(); + return to; + } + + // Map from the unidimensional edge index to the loop this edge belongs to. + ArrayList<Integer> indexToLoop; + // Reverse of index_to_loop_: maps a loop index to the + // unidimensional index of the first edge in the loop. + List<Integer> loopToFirstIndex; + + // Total number of edges. + int numEdges; + + // Total number of loops. + int numLoops; + } + + // Indexing structure for an S2Polygon. 
+ static class S2PolygonIndex extends S2LoopSequenceIndex { + private S2Polygon poly; + private boolean reverse; + + public S2PolygonIndex(S2Polygon poly, boolean reverse) { + this.poly = poly; + this.reverse = reverse; + for (int i = 0; i < poly.numLoops(); ++i) { + addLoop(poly.loop(i).numVertices()); + } + } + + @Override + public S2Edge edgeFromTo(int index) { + int loopIndex = 0; + int vertexInLoop = 0; + LoopVertexIndexPair indices = decodeIndex(index); + loopIndex = indices.getLoopIndex(); + vertexInLoop = indices.getVertexIndex(); + S2Loop loop = new S2Loop(poly.loop(loopIndex)); + int fromIndex; + int toIndex; + if (loop.isHole() ^ reverse) { + fromIndex = loop.numVertices() - 1 - vertexInLoop; + toIndex = 2 * loop.numVertices() - 2 - vertexInLoop; + } else { + fromIndex = vertexInLoop; + toIndex = vertexInLoop + 1; + } + S2Point from = loop.vertex(fromIndex); + S2Point to = loop.vertex(toIndex); + return new S2Edge(from, to); + } + } + + private static void addIntersection(S2Point a0, + S2Point a1, + S2Point b0, + S2Point b1, + boolean addSharedEdges, + int crossing, + List<ParametrizedS2Point> intersections) { + if (crossing > 0) { + // There is a proper edge crossing. + S2Point x = S2EdgeUtil.getIntersection(a0, a1, b0, b1); + double t = S2EdgeUtil.getDistanceFraction(x, a0, a1); + intersections.add(new ParametrizedS2Point(t, x)); + } else if (S2EdgeUtil.vertexCrossing(a0, a1, b0, b1)) { + // There is a crossing at one of the vertices. The basic rule is simple: + // if a0 equals one of the "b" vertices, the crossing occurs at t=0; + // otherwise, it occurs at t=1. + // + // This has the effect that when two symmetric edges are encountered (an + // edge an its reverse), neither one is included in the output. When two + // duplicate edges are encountered, both are included in the output. The + // "addSharedEdges" flag allows one of these two copies to be removed by + // changing its intersection parameter from 0 to 1. + double t = (a0 == b0 || a0 == b1) ? 
0 : 1; + if (!addSharedEdges && a1 == b1) { + t = 1; + } + intersections.add(new ParametrizedS2Point(t, t == 0 ? a0 : a1)); + } + } + + /** + * Find all points where the polygon B intersects the edge (a0,a1), and add + * the corresponding parameter values (in the range [0,1]) to "intersections". + */ + private static void clipEdge(final S2Point a0, final S2Point a1, S2LoopSequenceIndex bIndex, + boolean addSharedEdges, List<ParametrizedS2Point> intersections) { + S2LoopSequenceIndex.DataEdgeIterator it = new S2LoopSequenceIndex.DataEdgeIterator(bIndex); + it.getCandidates(a0, a1); + S2EdgeUtil.EdgeCrosser crosser = new S2EdgeUtil.EdgeCrosser(a0, a1, a0); + S2Point from = null; + S2Point to = null; + for (; it.hasNext(); it.next()) { + S2Point previousTo = to; + S2Edge fromTo = bIndex.edgeFromTo(it.index()); + from = fromTo.getStart(); + to = fromTo.getEnd(); + if (previousTo != from) { + crosser.restartAt(from); + } + int crossing = crosser.robustCrossing(to); + if (crossing < 0) { + continue; + } + addIntersection(a0, a1, from, to, addSharedEdges, crossing, intersections); + } + } + + /** + * Clip the boundary of A to the interior of B, and add the resulting edges to + * "builder". Shells are directed CCW and holes are directed clockwise, unless + * "reverseA" or "reverseB" is true in which case these directions in the + * corresponding polygon are reversed. If "invertB" is true, the boundary of A + * is clipped to the exterior rather than the interior of B. If + * "adSharedEdges" is true, then the output will include any edges that are + * shared between A and B (both edges must be in the same direction after any + * edge reversals are taken into account). 
+ */ + private static void clipBoundary(final S2Polygon a, + boolean reverseA, + final S2Polygon b, + boolean reverseB, + boolean invertB, + boolean addSharedEdges, + S2PolygonBuilder builder) { + S2PolygonIndex bIndex = new S2PolygonIndex(b, reverseB); + bIndex.predictAdditionalCalls(a.getNumVertices()); + + List<ParametrizedS2Point> intersections = Lists.newArrayList(); + for (S2Loop aLoop : a.loops) { + int n = aLoop.numVertices(); + int dir = (aLoop.isHole() ^ reverseA) ? -1 : 1; + boolean inside = b.contains(aLoop.vertex(0)) ^ invertB; + for (int j = (dir > 0) ? 0 : n; n > 0; --n, j += dir) { + S2Point a0 = aLoop.vertex(j); + S2Point a1 = aLoop.vertex(j + dir); + intersections.clear(); + clipEdge(a0, a1, bIndex, addSharedEdges, intersections); + + if (inside) { + intersections.add(new ParametrizedS2Point(0.0, a0)); + } + inside = ((intersections.size() & 0x1) == 0x1); + if ((b.contains(a1) ^ invertB) != inside) { + // TODO(andriy): do something more meaningful here. + // Better yet, refactor and move this check to the unit test. + log.severe("Internal clipBoundary error."); + } + if (inside) { + intersections.add(new ParametrizedS2Point(1.0, a1)); + } + + // Remove duplicates and produce a list of unique intersections. + TreeMultiset<ParametrizedS2Point> sortedIntersections = TreeMultiset.create(); + sortedIntersections.addAll(intersections); + Iterator<ParametrizedS2Point> iter = sortedIntersections.iterator(); + while (iter.hasNext()) { + S2Point p0 = iter.next().getPoint(); + if (!iter.hasNext()) { + break; + } + S2Point p1 = iter.next().getPoint(); + if (p0.equals(p1)) { + continue; + } + builder.addEdge(p0, p1); + } + } + } + } + + /** + * Returns total number of vertices in all loops. + */ + public int getNumVertices() { + return this.numVertices; + } + + /** + * Initialize this polygon to the intersection, union, or difference (A - B) + * of the given two polygons. 
The "vertexMergeRadius" determines how close two + * vertices must be to be merged together and how close a vertex must be to an + * edge in order to be spliced into it (see S2PolygonBuilder for details). By + * default, the merge radius is just large enough to compensate for errors + * that occur when computing intersection points between edges + * (S2EdgeUtil.DEFAULT_INTERSECTION_TOLERANCE). + * + * If you are going to convert the resulting polygon to a lower-precision + * format, it is necessary to increase the merge radius in order to get a + * valid result after rounding (i.e. no duplicate vertices, etc). For example, + * if you are going to convert them to geostore.PolygonProto format, then + * S1Angle.e7(1) is a good value for "vertex_merge_radius". + */ + public void initToIntersection(final S2Polygon a, final S2Polygon b) { + initToIntersectionSloppy(a, b, S2EdgeUtil.DEFAULT_INTERSECTION_TOLERANCE); + } + + public void initToIntersectionSloppy( + final S2Polygon a, final S2Polygon b, S1Angle vertexMergeRadius) { + Preconditions.checkState(numLoops() == 0); + if (!a.bound.intersects(b.bound)) { + return; + } + + // We want the boundary of A clipped to the interior of B, + // plus the boundary of B clipped to the interior of A, + // plus one copy of any directed edges that are in both boundaries. + + S2PolygonBuilder.Options options = S2PolygonBuilder.Options.DIRECTED_XOR; + options.setMergeDistance(vertexMergeRadius); + S2PolygonBuilder builder = new S2PolygonBuilder(options); + clipBoundary(a, false, b, false, false, true, builder); + clipBoundary(b, false, a, false, false, false, builder); + if (!builder.assemblePolygon(this, null)) { + // TODO (andriy): do something more meaningful here. 
+ log.severe("Bad directed edges"); + } + } + + public void initToUnion(final S2Polygon a, final S2Polygon b) { + initToUnionSloppy(a, b, S2EdgeUtil.DEFAULT_INTERSECTION_TOLERANCE); + } + + public void initToUnionSloppy(final S2Polygon a, final S2Polygon b, S1Angle vertexMergeRadius) { + Preconditions.checkState(numLoops() == 0); + + // We want the boundary of A clipped to the exterior of B, + // plus the boundary of B clipped to the exterior of A, + // plus one copy of any directed edges that are in both boundaries. + + S2PolygonBuilder.Options options = S2PolygonBuilder.Options.DIRECTED_XOR; + options.setMergeDistance(vertexMergeRadius); + S2PolygonBuilder builder = new S2PolygonBuilder(options); + clipBoundary(a, false, b, false, true, true, builder); + clipBoundary(b, false, a, false, true, false, builder); + if (!builder.assemblePolygon(this, null)) { + // TODO(andriy): do something more meaningful here. + log.severe("Bad directed edges"); + } + } + + /** + * Return a polygon which is the union of the given polygons. Note: clears the + * List! + */ + public static S2Polygon destructiveUnion(List<S2Polygon> polygons) { + return destructiveUnionSloppy(polygons, S2EdgeUtil.DEFAULT_INTERSECTION_TOLERANCE); + } + + /** + * Return a polygon which is the union of the given polygons; combines + * vertices that form edges that are almost identical, as defined by + * vertexMergeRadius. Note: clears the List! + */ + public static S2Polygon destructiveUnionSloppy( + List<S2Polygon> polygons, S1Angle vertexMergeRadius) { + // Effectively create a priority queue of polygons in order of number of + // vertices. Repeatedly union the two smallest polygons and add the result + // to the queue until we have a single polygon to return. 
+ + // map: # of vertices -> polygon + TreeMultimap<Integer, S2Polygon> queue = TreeMultimap.create(); + + for (S2Polygon polygon : polygons) { + queue.put(polygon.getNumVertices(), polygon); + } + polygons.clear(); + + Set<Map.Entry<Integer, S2Polygon>> queueSet = queue.entries(); + while (queueSet.size() > 1) { + // Pop two simplest polygons from queue. + queueSet = queue.entries(); + Iterator<Map.Entry<Integer, S2Polygon>> smallestIter = queueSet.iterator(); + + Map.Entry<Integer, S2Polygon> smallest = smallestIter.next(); + int aSize = smallest.getKey().intValue(); + S2Polygon aPolygon = smallest.getValue(); + smallestIter.remove(); + + smallest = smallestIter.next(); + int bSize = smallest.getKey().intValue(); + S2Polygon bPolygon = smallest.getValue(); + smallestIter.remove(); + + // Union and add result back to queue. + S2Polygon unionPolygon = new S2Polygon(); + unionPolygon.initToUnionSloppy(aPolygon, bPolygon, vertexMergeRadius); + int unionSize = aSize + bSize; + queue.put(unionSize, unionPolygon); + // We assume that the number of vertices in the union polygon is the + // sum of the number of vertices in the original polygons, which is not + // always true, but will almost always be a decent approximation, and + // faster than recomputing. 
+ } + + if (queue.isEmpty()) { + return new S2Polygon(); + } else { + return queue.get(queue.asMap().firstKey()).first(); + } + } + + public boolean isNormalized() { + Multiset<S2Point> vertices = HashMultiset.<S2Point>create(); + S2Loop lastParent = null; + for (int i = 0; i < numLoops(); ++i) { + S2Loop child = loop(i); + if (child.depth() == 0) { + continue; + } + S2Loop parent = loop(getParent(i)); + if (parent != lastParent) { + vertices.clear(); + for (int j = 0; j < parent.numVertices(); ++j) { + vertices.add(parent.vertex(j)); + } + lastParent = parent; + } + int count = 0; + for (int j = 0; j < child.numVertices(); ++j) { + if (vertices.count(child.vertex(j)) > 0) { + ++count; + } + } + if (count > 1) { + return false; + } + } + return true; + } + + /** + * Return true if two polygons have the same boundary except for vertex + * perturbations. Both polygons must have loops with the same cyclic vertex + * order and the same nesting hierarchy, but the vertex locations are allowed + * to differ by up to "max_error". Note: This method mostly useful only for + * testing purposes. + */ + boolean boundaryApproxEquals(S2Polygon b, double maxError) { + if (numLoops() != b.numLoops()) { + log.severe( + "!= loops: " + Integer.toString(numLoops()) + " vs. " + Integer.toString(b.numLoops())); + return false; + } + + // For now, we assume that there is at most one candidate match for each + // loop. (So far this method is just used for testing.) + for (int i = 0; i < numLoops(); ++i) { + S2Loop aLoop = loop(i); + boolean success = false; + for (int j = 0; j < numLoops(); ++j) { + S2Loop bLoop = b.loop(j); + if (bLoop.depth() == aLoop.depth() && bLoop.boundaryApproxEquals(aLoop, maxError)) { + success = true; + break; + } + } + if (!success) { + return false; + } + } + return true; + } + + // S2Region interface (see S2Region.java for details): + + /** Return a bounding spherical cap. 
 */
  @Override
  public S2Cap getCapBound() {
    return bound.getCapBound();
  }

  /** Return a bounding latitude-longitude rectangle. */
  @Override
  public S2LatLngRect getRectBound() {
    return bound;
  }

  /**
   * If this method returns true, the region completely contains the given cell.
   * Otherwise, either the region does not contain the cell or the containment
   * relationship could not be determined.
   */
  @Override
  public boolean contains(S2Cell cell) {
    if (numLoops() == 1) {
      return loop(0).contains(cell);
    }
    S2LatLngRect cellBound = cell.getRectBound();
    if (!bound.contains(cellBound)) {
      return false;
    }

    // Fall back to polygon-vs-polygon containment by wrapping the cell in a
    // single-loop polygon.
    S2Loop cellLoop = new S2Loop(cell, cellBound);
    S2Polygon cellPoly = new S2Polygon(cellLoop);
    return contains(cellPoly);
  }

  /**
   * If this method returns false, the region does not intersect the given cell.
   * Otherwise, either region intersects the cell, or the intersection
   * relationship could not be determined.
   */
  @Override
  public boolean mayIntersect(S2Cell cell) {
    if (numLoops() == 1) {
      return loop(0).mayIntersect(cell);
    }
    S2LatLngRect cellBound = cell.getRectBound();
    if (!bound.intersects(cellBound)) {
      return false;
    }

    S2Loop cellLoop = new S2Loop(cell, cellBound);
    S2Polygon cellPoly = new S2Polygon(cellLoop);
    return intersects(cellPoly);
  }

  /**
   * The point 'p' does not need to be normalized.
   */
  public boolean contains(S2Point p) {
    if (numLoops() == 1) {
      return loop(0).contains(p); // Optimization.
    }
    if (!bound.contains(p)) {
      return false;
    }
    // Point is inside if it is contained by an odd number of loops.
    boolean inside = false;
    for (int i = 0; i < numLoops(); ++i) {
      inside ^= loop(i).contains(p);
      if (inside && !hasHoles) {
        break; // Shells are disjoint.
      }
    }
    return inside;
  }

  // For each map entry, sorts the value list.
  private static void sortValueLoops(Map<S2Loop, List<S2Loop>> loopMap) {
    for (S2Loop key : loopMap.keySet()) {
      Collections.sort(loopMap.get(key));
    }
  }

  /**
   * Inserts newLoop into the nesting hierarchy rooted at parent, descending
   * into an existing child if it contains newLoop and re-parenting any
   * existing children that newLoop contains.
   */
  private static void insertLoop(S2Loop newLoop, S2Loop parent, Map<S2Loop, List<S2Loop>> loopMap) {
    List<S2Loop> children = loopMap.get(parent);

    if (children == null) {
      children = Lists.newArrayList();
      loopMap.put(parent, children);
    }

    for (S2Loop child : children) {
      if (child.containsNested(newLoop)) {
        insertLoop(newLoop, child, loopMap);
        return;
      }
    }

    // No loop may contain the complement of another loop. (Handling this case
    // is significantly more complicated.)
    // assert (parent == null || !newLoop.containsNested(parent));

    // Some of the children of the parent loop may now be children of
    // the new loop.
    List<S2Loop> newChildren = loopMap.get(newLoop);
    for (int i = 0; i < children.size();) {
      S2Loop child = children.get(i);
      if (newLoop.containsNested(child)) {
        if (newChildren == null) {
          newChildren = Lists.newArrayList();
          loopMap.put(newLoop, newChildren);
        }
        newChildren.add(child);
        children.remove(i);
      } else {
        ++i;
      }
    }
    children.add(newLoop);
  }

  /**
   * Appends loop (and recursively its descendants) to this.loops in preorder,
   * assigning nesting depths along the way. A null loop is the root sentinel.
   */
  private void initLoop(S2Loop loop, int depth, Map<S2Loop, List<S2Loop>> loopMap) {
    if (loop != null) {
      loop.setDepth(depth);
      loops.add(loop);
    }
    List<S2Loop> children = loopMap.get(loop);
    if (children != null) {
      for (S2Loop child : children) {
        initLoop(child, depth + 1, loopMap);
      }
    }
  }

  /**
   * Returns 1 if loop B is contained by this polygon, 0 if it is not, and -1
   * if the boundaries intersect.
   */
  private int containsOrCrosses(S2Loop b) {
    boolean inside = false;
    for (int i = 0; i < numLoops(); ++i) {
      int result = loop(i).containsOrCrosses(b);
      if (result < 0) {
        return -1; // The loop boundaries intersect.
      }
      if (result > 0) {
        inside ^= true;
      }
    }
    return inside ? 1 : 0; // True if loop B is contained by the polygon.
  }

  /** Return true if any loop contains the given loop. */
  private boolean anyLoopContains(S2Loop b) {
    for (int i = 0; i < numLoops(); ++i) {
      if (loop(i).contains(b)) {
        return true;
      }
    }
    return false;
  }

  /** Return true if this polygon (A) contains all the shells of B. */
  private boolean containsAllShells(S2Polygon b) {
    for (int j = 0; j < b.numLoops(); ++j) {
      if (b.loop(j).sign() < 0) {
        continue;
      }
      if (containsOrCrosses(b.loop(j)) <= 0) {
        // Shell of B is not contained by A, or the boundaries intersect.
        return false;
      }
    }
    return true;
  }

  /**
   * Return true if this polygon (A) excludes (i.e. does not intersect) all
   * holes of B.
   */
  private boolean excludesAllHoles(S2Polygon b) {
    for (int j = 0; j < b.numLoops(); ++j) {
      if (b.loop(j).sign() > 0) {
        continue;
      }
      if (containsOrCrosses(b.loop(j)) != 0) {
        // Hole of B is contained by A, or the boundaries intersect.
        return false;
      }
    }
    return true;
  }

  /** Return true if this polygon (A) intersects any shell of B. */
  private boolean intersectsAnyShell(S2Polygon b) {
    for (int j = 0; j < b.numLoops(); ++j) {
      if (b.loop(j).sign() < 0) {
        continue;
      }
      if (containsOrCrosses(b.loop(j)) != 0) {
        // Shell of B is contained by A, or the boundaries intersect.
        return true;
      }
    }
    return false;
  }

  /**
   * A human readable representation of the polygon
   */
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("Polygon: (").append(numLoops()).append(") loops:\n");
    for (int i = 0; i < numLoops(); ++i) {
      S2Loop s2Loop = loop(i);
      sb.append("loop <\n");
      for (int v = 0; v < s2Loop.numVertices(); ++v) {
        S2Point s2Point = s2Loop.vertex(v);
        sb.append(s2Point.toDegreesString());
        sb.append("\n"); // end of vertex
      }
      sb.append(">\n"); // end of loop
    }
    return sb.toString();
  }

  /**
   * An edge whose equals/hashCode ignore direction, so that AB and BA compare
   * equal (used for duplicate-edge detection in isValid()).
   */
  private static class UndirectedEdge extends S2Edge {

    public UndirectedEdge(S2Point vertexA, S2Point vertexB) {
      super(vertexA, vertexB);
    }

    @Override
    public boolean equals(Object o) {
      if (o == null || !(o instanceof UndirectedEdge)) {
        return false;
      }
      UndirectedEdge other = (UndirectedEdge) o;
      return ((getStart().equals(other.getStart()) && getEnd().equals(other.getEnd()))
          || (getStart().equals(other.getEnd()) && getEnd().equals(other.getStart())));
    }

    @Override
    public int hashCode() {
      // Symmetric in start/end, consistent with the direction-insensitive
      // equals() above.
      return getStart().hashCode() + getEnd().hashCode();
    }
  }

  /** Immutable (loop index, vertex index) pair. */
  private static class LoopVertexIndexPair {

    private final int loopIndex;
    private final int vertexIndex;

    public LoopVertexIndexPair(int loopIndex, int vertexIndex) {
      this.loopIndex = loopIndex;
      this.vertexIndex = vertexIndex;
    }

    public int getLoopIndex() {
      return loopIndex;
    }

    public int getVertexIndex() {
      return vertexIndex;
    }
  }

  /**
   * An S2Point that also has a parameter associated with it, which corresponds
   * to a time-like order on the points.
   */
  private static class ParametrizedS2Point implements Comparable<ParametrizedS2Point> {

    private final double time;
    private final S2Point point;

    public ParametrizedS2Point(double time, S2Point point) {
      this.time = time;
      this.point = point;
    }

    public double getTime() {
      return time;
    }

    public S2Point getPoint() {
      return point;
    }

    @Override
    public int compareTo(ParametrizedS2Point o) {
      // Order primarily by time, breaking ties by the point itself.
      int compareTime = Double.compare(time, o.getTime());
      if (compareTime != 0) {
        return compareTime;
      }
      return point.compareTo(o.getPoint());
    }
  }
}
diff --git a/src/com/google/common/geometry/S2PolygonBuilder.java b/src/com/google/common/geometry/S2PolygonBuilder.java
new file mode 100644
index 0000000..4adedb2
--- /dev/null
+++ b/src/com/google/common/geometry/S2PolygonBuilder.java
@@ -0,0 +1,718 @@
/*
 * Copyright 2006 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.common.geometry;

import com.google.common.collect.ForwardingMultimap;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.HashMultiset;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multiset;

import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import java.util.logging.Logger;

/**
 * This is a simple class for assembling polygons out of edges. It requires that
 * no two edges cross. It can handle both directed and undirected edges, and
 * optionally it can also remove duplicate edge pairs (consisting of two
 * identical edges or an edge and its reverse edge). This is useful for
 * computing seamless unions of polygons that have been cut into pieces.
 *
 * Here are some of the situations this class was designed to handle:
 *
 * 1. Computing the union of disjoint polygons that may share part of their
 * boundaries. For example, reassembling a lake that has been split into two
 * loops by a state boundary.
 *
 * 2. Constructing polygons from input data that does not follow S2
 * conventions, i.e. where loops may have repeated vertices, or distinct loops
 * may share edges, or shells and holes have opposite or unspecified
 * orientations.
 *
 * 3. Computing the symmetric difference of a set of polygons whose edges
 * intersect only at vertices. This can be used to implement a limited form of
 * polygon intersection or subtraction as well as unions.
 *
 * 4. As a tool for implementing other polygon operations by generating a
 * collection of directed edges and then assembling them into loops.
 *
 */
public strictfp class S2PolygonBuilder {
  private static final Logger log = Logger.getLogger(S2PolygonBuilder.class.getCanonicalName());

  private Options options;

  /**
   * The current set of edges, grouped by origin. The set of destination
   * vertices is a multiset so that the same edge can be present more than once.
   */
  private Map<S2Point, Multiset<S2Point>> edges;

  /**
   * Default constructor for well-behaved polygons. Uses the DIRECTED_XOR
   * options.
   */
  public S2PolygonBuilder() {
    this(Options.DIRECTED_XOR);
  }

  public S2PolygonBuilder(Options options) {
    this.options = options;
    this.edges = Maps.newHashMap();
  }

  public enum Options {

    /**
     * These are the options that should be used for assembling well-behaved
     * input data into polygons. All edges should be directed such that "shells"
     * and "holes" have opposite orientations (typically CCW shells and
     * clockwise holes), unless it is known that shells and holes do not share
     * any edges.
     */
    DIRECTED_XOR(false, true),

    /**
     * These are the options that should be used for assembling polygons that do
     * not follow the conventions above, e.g. where edge directions may vary
     * within a single loop, or shells and holes are not oppositely oriented.
     */
    UNDIRECTED_XOR(true, true),

    /**
     * These are the options that should be used for assembling edges where the
     * desired output is a collection of loops rather than a polygon, and edges
     * may occur more than once. Edges are treated as undirected and are not
     * XORed together, in particular, adding edge A->B also adds B->A.
     */
    UNDIRECTED_UNION(true, false),

    /**
     * Finally, select this option when the desired output is a collection of
     * loops rather than a polygon, but your input edges are directed and you do
     * not want reverse edges to be added implicitly as above.
     */
    DIRECTED_UNION(false, false);

    private boolean undirectedEdges;
    private boolean xorEdges;
    private boolean validate;
    private S1Angle mergeDistance;

    private Options(boolean undirectedEdges, boolean xorEdges) {
      this.undirectedEdges = undirectedEdges;
      this.xorEdges = xorEdges;
      this.validate = false;
      this.mergeDistance = S1Angle.radians(0);
    }

    /**
     * If "undirected_edges" is false, then the input is assumed to consist of
     * edges that can be assembled into oriented loops without reversing any of
     * the edges. Otherwise, "undirected_edges" should be set to true.
     */
    public boolean getUndirectedEdges() {
      return undirectedEdges;
    }

    /**
     * If "xor_edges" is true, then any duplicate edge pairs are removed. This
     * is useful for computing the union of a collection of polygons whose
     * interiors are disjoint but whose boundaries may share some common edges
     * (e.g. computing the union of South Africa, Lesotho, and Swaziland).
     *
     * Note that for directed edges, a "duplicate edge pair" consists of an
     * edge and its corresponding reverse edge. This means that either (a)
     * "shells" and "holes" must have opposite orientations, or (b) shells and
     * holes do not share edges. Otherwise undirected_edges() should be
     * specified.
     *
     * There are only two reasons to turn off xor_edges():
     *
     * (1) assemblePolygon() will be called, and you want to assert that there
     * are no duplicate edge pairs in the input.
     *
     * (2) assembleLoops() will be called, and you want to keep abutting loops
     * separate in the output rather than merging their regions together (e.g.
     * assembling loops for Kansas City, KS and Kansas City, MO simultaneously).
     */
    public boolean getXorEdges() {
      return xorEdges;
    }

    /**
     * Default value: false
     */
    public boolean getValidate() {
      return validate;
    }

    /**
     * Default value: 0
     */
    public S1Angle getMergeDistance() {
      return mergeDistance;
    }

    /**
     * If true, isValid() is called on all loops and polygons before
     * constructing them. If any loop is invalid (e.g. self-intersecting), it is
     * rejected and returned as a set of "unused edges". Any remaining valid
     * loops are kept. If the entire polygon is invalid (e.g. two loops
     * intersect), then all loops are rejected and returned as unused edges.
     */
    public void setValidate(boolean validate) {
      this.validate = validate;
    }

    /**
     * If set to a positive value, all vertices that are separated by at most
     * this distance will be merged together. In addition, vertices that are
     * closer than this distance to a non-incident edge will be spliced into it
     * (TODO).
     *
     * The merging is done in such a way that all vertex-vertex and vertex-edge
     * distances in the output are greater than 'merge_distance'.
     *
     * This method is useful for assembling polygons out of input data where
     * vertices and/or edges may not be perfectly aligned.
     *
     * NOTE(review): mutating a shared enum constant's state here affects every
     * user of that constant; confirm callers expect this.
     */
    public void setMergeDistance(S1Angle mergeDistance) {
      this.mergeDistance = mergeDistance;
    }

    // Used for testing only
    void setUndirectedEdges(boolean undirectedEdges) {
      this.undirectedEdges = undirectedEdges;
    }

    // Used for testing only
    void setXorEdges(boolean xorEdges) {
      this.xorEdges = xorEdges;
    }
  }

  public Options options() {
    return options;
  }

  /**
   * Add the given edge to the polygon builder. This method should be used for
   * input data that may not follow S2 polygon conventions. Note that edges are
   * not allowed to cross each other. Also note that as a convenience, edges
   * where v0 == v1 are ignored.
+ */ + public void addEdge(S2Point v0, S2Point v1) { + // If xor_edges is true, we look for an existing edge in the opposite + // direction. We either delete that edge or insert a new one. + + if (v0.equals(v1)) { + return; + } + + if (options.getXorEdges()) { + Multiset<S2Point> candidates = edges.get(v1); + if (candidates != null && candidates.count(v0) > 0) { + eraseEdge(v1, v0); + return; + } + } + + if (edges.get(v0) == null) { + edges.put(v0, HashMultiset.<S2Point>create()); + } + + edges.get(v0).add(v1); + if (options.getUndirectedEdges()) { + if (edges.get(v1) == null) { + edges.put(v1, HashMultiset.<S2Point>create()); + } + edges.get(v1).add(v0); + } + } + + /** + * Add all edges in the given loop. If the sign() of the loop is negative + * (i.e. this loop represents a hole), the reverse edges are added instead. + * This implies that "shells" are CCW and "holes" are CW, as required for the + * directed edges convention described above. + * + * This method does not take ownership of the loop. + */ + public void addLoop(S2Loop loop) { + int sign = loop.sign(); + for (int i = loop.numVertices(); i > 0; --i) { + // Vertex indices need to be in the range [0, 2*num_vertices()-1]. + addEdge(loop.vertex(i), loop.vertex(i + sign)); + } + } + + /** + * Add all loops in the given polygon. Shells and holes are added with + * opposite orientations as described for AddLoop(). This method does not take + * ownership of the polygon. + */ + public void addPolygon(S2Polygon polygon) { + for (int i = 0; i < polygon.numLoops(); ++i) { + addLoop(polygon.loop(i)); + } + } + + /** + * Assembles the given edges into as many non-crossing loops as possible. When + * there is a choice about how to assemble the loops, then CCW loops are + * preferred. Returns true if all edges were assembled. If "unused_edges" is + * not NULL, it is initialized to the set of edges that could not be assembled + * into loops. 
+ * + * Note that if xor_edges() is false and duplicate edge pairs may be present, + * then undirected_edges() should be specified unless all loops can be + * assembled in a counter-clockwise direction. Otherwise this method may not + * be able to assemble all loops due to its preference for CCW loops. + * + * This method resets the S2PolygonBuilder state so that it can be reused. + */ + public boolean assembleLoops(List<S2Loop> loops, List<S2Edge> unusedEdges) { + if (options.getMergeDistance().radians() > 0) { + mergeVertices(); + } + + List<S2Edge> dummyUnusedEdges = Lists.newArrayList(); + if (unusedEdges == null) { + unusedEdges = dummyUnusedEdges; + } + + // We repeatedly choose an arbitrary edge and attempt to assemble a loop + // starting from that edge. (This is always possible unless the input + // includes extra edges that are not part of any loop.) + + unusedEdges.clear(); + while (!edges.isEmpty()) { + Map.Entry<S2Point, Multiset<S2Point>> edge = edges.entrySet().iterator().next(); + + S2Point v0 = edge.getKey(); + S2Point v1 = edge.getValue().iterator().next(); + + S2Loop loop = assembleLoop(v0, v1, unusedEdges); + if (loop == null) { + continue; + } + + // In the case of undirected edges, we may have assembled a clockwise + // loop while trying to assemble a CCW loop. To fix this, we assemble + // a new loop starting with an arbitrary edge in the reverse direction. + // This is guaranteed to assemble a loop that is interior to the previous + // one and will therefore eventually terminate. + + while (options.getUndirectedEdges() && !loop.isNormalized()) { + loop = assembleLoop(loop.vertex(1), loop.vertex(0), unusedEdges); + } + loops.add(loop); + eraseLoop(loop, loop.numVertices()); + } + return unusedEdges.isEmpty(); + } + + /** + * Like AssembleLoops, but normalizes all the loops so that they enclose less + * than half the sphere, and then assembles the loops into a polygon. 
+ * + * For this method to succeed, there should be no duplicate edges in the + * input. If this is not known to be true, then the "xor_edges" option should + * be set (which is true by default). + * + * Note that S2Polygons cannot represent arbitrary regions on the sphere, + * because of the limitation that no loop encloses more than half of the + * sphere. For example, an S2Polygon cannot represent a 100km wide band around + * the equator. In such cases, this method will return the *complement* of the + * expected region. So for example if all the world's coastlines were + * assembled, the output S2Polygon would represent the land area (irrespective + * of the input edge or loop orientations). + */ + public boolean assemblePolygon(S2Polygon polygon, List<S2Edge> unusedEdges) { + List<S2Loop> loops = Lists.newArrayList(); + boolean success = assembleLoops(loops, unusedEdges); + + // If edges are undirected, then all loops are already CCW. Otherwise we + // need to make sure the loops are normalized. + if (!options.getUndirectedEdges()) { + for (int i = 0; i < loops.size(); ++i) { + loops.get(i).normalize(); + } + } + if (options.getValidate() && !S2Polygon.isValid(loops)) { + if (unusedEdges != null) { + for (S2Loop loop : loops) { + rejectLoop(loop, loop.numVertices(), unusedEdges); + } + } + return false; + } + polygon.init(loops); + return success; + } + + /** + * Convenience method for when you don't care about unused edges. 
+ */ + public S2Polygon assemblePolygon() { + S2Polygon polygon = new S2Polygon(); + List<S2Edge> unusedEdges = Lists.newArrayList(); + + assemblePolygon(polygon, unusedEdges); + + return polygon; + } + + // Debugging functions: + + protected void dumpEdges(S2Point v0) { + log.info(v0.toString()); + Multiset<S2Point> vset = edges.get(v0); + if (vset != null) { + for (S2Point v : vset) { + log.info(" " + v.toString()); + } + } + } + + protected void dump() { + for (S2Point v : edges.keySet()) { + dumpEdges(v); + } + } + + private void eraseEdge(S2Point v0, S2Point v1) { + // Note that there may be more than one copy of an edge if we are not XORing + // them, so a VertexSet is a multiset. + + Multiset<S2Point> vset = edges.get(v0); + // assert (vset.count(v1) > 0); + vset.remove(v1); + if (vset.isEmpty()) { + edges.remove(v0); + } + + if (options.getUndirectedEdges()) { + vset = edges.get(v1); + // assert (vset.count(v0) > 0); + vset.remove(v0); + if (vset.isEmpty()) { + edges.remove(v1); + } + } + } + + private void eraseLoop(List<S2Point> v, int n) { + for (int i = n - 1, j = 0; j < n; i = j++) { + eraseEdge(v.get(i), v.get(j)); + } + } + + private void eraseLoop(S2Loop v, int n) { + for (int i = n - 1, j = 0; j < n; i = j++) { + eraseEdge(v.vertex(i), v.vertex(j)); + } + } + + /** + * We start at the given edge and assemble a loop taking left turns whenever + * possible. We stop the loop as soon as we encounter any vertex that we have + * seen before *except* for the first vertex (v0). This ensures that only CCW + * loops are constructed when possible. + */ + private S2Loop assembleLoop(S2Point v0, S2Point v1, List<S2Edge> unusedEdges) { + + // The path so far. + List<S2Point> path = Lists.newArrayList(); + + // Maps a vertex to its index in "path". + Map<S2Point, Integer> index = Maps.newHashMap(); + path.add(v0); + path.add(v1); + + index.put(v1, 1); + + while (path.size() >= 2) { + // Note that "v0" and "v1" become invalid if "path" is modified. 
+ v0 = path.get(path.size() - 2); + v1 = path.get(path.size() - 1); + + S2Point v2 = null; + boolean v2Found = false; + Multiset<S2Point> vset = edges.get(v1); + if (vset != null) { + for (S2Point v : vset) { + // We prefer the leftmost outgoing edge, ignoring any reverse edges. + if (v.equals(v0)) { + continue; + } + if (!v2Found || S2.orderedCCW(v0, v2, v, v1)) { + v2 = v; + } + v2Found = true; + } + } + if (!v2Found) { + // We've hit a dead end. Remove this edge and backtrack. + unusedEdges.add(new S2Edge(v0, v1)); + eraseEdge(v0, v1); + index.remove(v1); + path.remove(path.size() - 1); + } else if (index.get(v2) == null) { + // This is the first time we've visited this vertex. + index.put(v2, path.size()); + path.add(v2); + } else { + // We've completed a loop. Throw away any initial vertices that + // are not part of the loop. + for (int i = 0; i < index.get(v2); ++i) { + path.remove(0); + } + + if (options.getValidate() && !S2Loop.isValid(path)) { + // We've constructed a loop that crosses itself, which can only happen + // if there is bad input data. Throw away the whole loop. + rejectLoop(path, path.size(), unusedEdges); + eraseLoop(path, path.size()); + return null; + } + return new S2Loop(path); + } + } + return null; + } + + /** Erases all edges of the given loop and marks them as unused. */ + private void rejectLoop(S2Loop v, int n, List<S2Edge> unusedEdges) { + for (int i = n - 1, j = 0; j < n; i = j++) { + unusedEdges.add(new S2Edge(v.vertex(i), v.vertex(j))); + } + } + + /** Erases all edges of the given loop and marks them as unused. */ + private void rejectLoop(List<S2Point> v, int n, List<S2Edge> unusedEdges) { + for (int i = n - 1, j = 0; j < n; i = j++) { + unusedEdges.add(new S2Edge(v.get(i), v.get(j))); + } + } + + /** Moves a set of vertices from old to new positions. 
*/
+ private void moveVertices(Map<S2Point, S2Point> mergeMap) {
+ if (mergeMap.isEmpty()) {
+ return;
+ }
+
+ // We need to copy the set of edges affected by the move, since
+ // this.edges could be reallocated when we start modifying it.
+ List<S2Edge> edgesCopy = Lists.newArrayList();
+ for (Map.Entry<S2Point, Multiset<S2Point>> edge : this.edges.entrySet()) {
+ S2Point v0 = edge.getKey();
+ Multiset<S2Point> vset = edge.getValue();
+ for (S2Point v1 : vset) {
+ if (mergeMap.get(v0) != null || mergeMap.get(v1) != null) {
+
+ // We only need to modify one copy of each undirected edge.
+ if (!options.getUndirectedEdges() || v0.lessThan(v1)) {
+ edgesCopy.add(new S2Edge(v0, v1));
+ }
+ }
+ }
+ }
+
+ // Now erase all the old edges, and add all the new edges. This will
+ // automatically take care of any XORing that needs to be done, because
+ // EraseEdge also erases the sibling of undirected edges.
+ for (int i = 0; i < edgesCopy.size(); ++i) {
+ S2Point v0 = edgesCopy.get(i).getStart();
+ S2Point v1 = edgesCopy.get(i).getEnd();
+ eraseEdge(v0, v1);
+ if (mergeMap.get(v0) != null) {
+ v0 = mergeMap.get(v0);
+ }
+ if (mergeMap.get(v1) != null) {
+ v1 = mergeMap.get(v1);
+ }
+ addEdge(v0, v1);
+ }
+ }
+
+ /**
+ * Look for groups of vertices that are separated by at most merge_distance()
+ * and merge them into a single vertex.
+ */
+ private void mergeVertices() {
+ // The overall strategy is to start from each vertex and grow a maximal
+ // cluster of mergeable vertices. In graph theoretic terms, we find the
+ // connected components of the undirected graph whose edges connect pairs of
+ // vertices that are separated by at most merge_distance.
+ //
+ // We then choose a single representative vertex for each cluster, and
+ // update all the edges appropriately. We choose an arbitrary existing
+ // vertex rather than computing the centroid of all the vertices to avoid
+ // creating new vertex pairs that need to be merged. 
(We guarantee that all
+ // vertex pairs are separated by at least merge_distance in the output.)
+
+ PointIndex index = new PointIndex(options.getMergeDistance().radians());
+
+ for (Map.Entry<S2Point, Multiset<S2Point>> edge : edges.entrySet()) {
+ index.add(edge.getKey());
+ Multiset<S2Point> vset = edge.getValue();
+ for (S2Point v : vset) {
+ index.add(v);
+ }
+ }
+
+ // Next, we loop through all the vertices and attempt to grow a maximal
+ // mergeable group starting from each vertex.
+
+ Map<S2Point, S2Point> mergeMap = Maps.newHashMap();
+ Stack<S2Point> frontier = new Stack<S2Point>();
+ List<S2Point> mergeable = Lists.newArrayList();
+
+ for (Map.Entry<S2CellId, MarkedS2Point> entry : index.entries()) {
+ MarkedS2Point point = entry.getValue();
+ if (point.isMarked()) {
+ continue; // Already processed.
+ }
+
+ point.mark();
+
+ // Grow a maximal mergeable component starting from "vstart", the
+ // canonical representative of the mergeable group.
+ S2Point vstart = point.getPoint();
+ frontier.push(vstart);
+ while (!frontier.isEmpty()) {
+ S2Point v0 = frontier.pop();
+
+ index.query(v0, mergeable);
+ for (S2Point v1 : mergeable) {
+ frontier.push(v1);
+ mergeMap.put(v1, vstart);
+ }
+ }
+ }
+
+ // Finally, we need to replace vertices according to the merge_map.
+ moveVertices(mergeMap);
+ }
+
+ /**
+ * A PointIndex is a cheap spatial index to help us find mergeable vertices.
+ * Given a set of points, it can efficiently find all of the points within a
+ * given search radius of an arbitrary query location. It is essentially just
+ * a hash map from cell ids at a given fixed level to the set of points
+ * contained by that cell id.
+ *
+ * This class is not suitable for general use because it only supports
+ * fixed-radius queries and has various special-purpose operations to avoid
+ * the need for additional data structures. 
+ */ + private class PointIndex extends ForwardingMultimap<S2CellId, MarkedS2Point> { + private double searchRadius; + private int level; + private final Multimap<S2CellId, MarkedS2Point> delegate = HashMultimap.create(); + + public PointIndex(double searchRadius) { + + this.searchRadius = searchRadius; + + // We choose a cell level such that if dist(A,B) <= search_radius, the + // S2CellId at that level containing A is a vertex neighbor of B (see + // S2CellId.getVertexNeighbors). This turns out to be the highest + // level such that a spherical cap (i.e. "disc") of the given radius + // fits completely inside all cells at that level. + this.level = + Math.min(S2Projections.MIN_WIDTH.getMaxLevel(2 * searchRadius), S2CellId.MAX_LEVEL - 1); + } + + @Override + protected Multimap<S2CellId, MarkedS2Point> delegate() { + return delegate; + } + + /** Add a point to the index if it does not already exist. */ + public void add(S2Point p) { + S2CellId id = S2CellId.fromPoint(p).parent(level); + Collection<MarkedS2Point> pointSet = get(id); + for (MarkedS2Point point : pointSet) { + if (point.getPoint().equals(p)) { + return; + } + } + put(id, new MarkedS2Point(p)); + } + + /** + * Return the set the unmarked points whose distance to "center" is less + * than search_radius_, and mark these points. By construction, these points + * will be contained by one of the vertex neighbors of "center". + */ + public void query(S2Point center, List<S2Point> output) { + output.clear(); + + List<S2CellId> neighbors = Lists.newArrayList(); + S2CellId.fromPoint(center).getVertexNeighbors(level, neighbors); + for (S2CellId id : neighbors) { + // Iterate over the points contained by each vertex neighbor. + for (MarkedS2Point mp : get(id)) { + if (mp.isMarked()) { + continue; + } + S2Point p = mp.getPoint(); + + if (center.angle(p) <= searchRadius) { + output.add(p); + mp.mark(); + } + } + } + } + } + + /** + * An S2Point that can be marked. Used in PointIndex. 
+ */ + private class MarkedS2Point { + private S2Point point; + private boolean mark; + + public MarkedS2Point(S2Point point) { + this.point = point; + this.mark = false; + } + + public boolean isMarked() { + return mark; + } + + public S2Point getPoint() { + return point; + } + + public void mark() { + // assert (!isMarked()); + this.mark = true; + } + } +} diff --git a/src/com/google/common/geometry/S2Polyline.java b/src/com/google/common/geometry/S2Polyline.java new file mode 100644 index 0000000..8b140f2 --- /dev/null +++ b/src/com/google/common/geometry/S2Polyline.java @@ -0,0 +1,289 @@ +/* + * Copyright 2006 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.common.geometry; + +import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Objects; +import com.google.common.base.Preconditions; + +import java.util.Arrays; +import java.util.List; +import java.util.logging.Logger; + +/** + * An S2Polyline represents a sequence of zero or more vertices connected by + * straight edges (geodesics). Edges of length 0 and 180 degrees are not + * allowed, i.e. adjacent vertices should not be identical or antipodal. + * + */ +public strictfp class S2Polyline implements S2Region { + private static final Logger log = Logger.getLogger(S2Polyline.class.getCanonicalName()); + + private int numVertices; + + private S2Point[] vertices; + + // TODO(kirilll): Get rid of debug mode. Turn it into tests. 
+ @VisibleForTesting + static boolean debugMode = false; + + /** + * Create a polyline that connects the given vertices. Empty polylines are + * allowed. Adjacent vertices should not be identical or antipodal. All + * vertices should be unit length. + * + * @param vertices + */ + public S2Polyline(List<S2Point> vertices) { + this.numVertices = vertices.size(); + this.vertices = new S2Point[numVertices]; + + if (debugMode) { + // assert (isValid(vertices)); + } + + if (numVertices > 0) { + vertices.toArray(this.vertices); + } + } + + /** + * Copy constructor. + */ + public S2Polyline(S2Polyline src) { + this.numVertices = src.numVertices(); + this.vertices = src.vertices.clone(); + } + + /** + * Return true if the given vertices form a valid polyline. + */ + public boolean isValid(List<S2Point> vertices) { + // All vertices must be unit length. + int n = vertices.size(); + for (int i = 0; i < n; ++i) { + if (!S2.isUnitLength(vertices.get(i))) { + log.info("Vertex " + i + " is not unit length"); + return false; + } + } + + // Adjacent vertices must not be identical or antipodal. + for (int i = 1; i < n; ++i) { + if (vertices.get(i - 1).equals(vertices.get(i)) + || vertices.get(i - 1).equals(S2Point.neg(vertices.get(i)))) { + log.info("Vertices " + (i - 1) + " and " + i + " are identical or antipodal"); + return false; + } + } + + return true; + } + + public int numVertices() { + return numVertices; + } + + public S2Point vertex(int k) { + // assert (k >= 0 && k < numVertices); + return vertices[k]; + } + + /** + * Return the angle corresponding to the total arclength of the polyline on a + * unit sphere. + */ + public S1Angle getArclengthAngle() { + double lengthSum = 0; + for (int i = 1; i < numVertices(); ++i) { + lengthSum += vertex(i - 1).angle(vertex(i)); + } + return S1Angle.radians(lengthSum); + } + + /** + * Return the point whose distance from vertex 0 along the polyline is the + * given fraction of the polyline's total length. 
Fractions less than zero or
+ * greater than one are clamped. The return value is unit length. The cost of
+ * this function is currently linear in the number of vertices.
+ */
+ public S2Point interpolate(double fraction) {
+ // We intentionally let the (fraction >= 1) case fall through, since
+ // we need to handle it in the loop below in any case because of
+ // possible roundoff errors.
+ if (fraction <= 0) {
+ return vertex(0);
+ }
+
+ double lengthSum = 0;
+ for (int i = 1; i < numVertices(); ++i) {
+ lengthSum += vertex(i - 1).angle(vertex(i));
+ }
+ double target = fraction * lengthSum;
+ for (int i = 1; i < numVertices(); ++i) {
+ double length = vertex(i - 1).angle(vertex(i));
+ if (target < length) {
+ // This code interpolates with respect to arc length rather than
+ // straight-line distance, and produces a unit-length result.
+ double f = Math.sin(target) / Math.sin(length);
+ return S2Point.add(S2Point.mul(vertex(i - 1), (Math.cos(target) - f * Math.cos(length))),
+ S2Point.mul(vertex(i), f));
+ }
+ target -= length;
+ }
+ return vertex(numVertices() - 1);
+ }
+
+ // S2Region interface (see {@code S2Region} for details):
+
+ /** Return a bounding spherical cap. */
+ @Override
+ public S2Cap getCapBound() {
+ return getRectBound().getCapBound();
+ }
+
+
+ /** Return a bounding latitude-longitude rectangle. */
+ @Override
+ public S2LatLngRect getRectBound() {
+ S2EdgeUtil.RectBounder bounder = new S2EdgeUtil.RectBounder();
+ for (int i = 0; i < numVertices(); ++i) {
+ bounder.addPoint(vertex(i));
+ }
+ return bounder.getBound();
+ }
+
+ /**
+ * If this method returns true, the region completely contains the given cell.
+ * Otherwise, either the region does not contain the cell or the containment
+ * relationship could not be determined. 
+ */ + @Override + public boolean contains(S2Cell cell) { + throw new UnsupportedOperationException( + "'containment' is not numerically well-defined " + "except at the polyline vertices"); + } + + /** + * If this method returns false, the region does not intersect the given cell. + * Otherwise, either region intersects the cell, or the intersection + * relationship could not be determined. + */ + @Override + public boolean mayIntersect(S2Cell cell) { + if (numVertices() == 0) { + return false; + } + + // We only need to check whether the cell contains vertex 0 for correctness, + // but these tests are cheap compared to edge crossings so we might as well + // check all the vertices. + for (int i = 0; i < numVertices(); ++i) { + if (cell.contains(vertex(i))) { + return true; + } + } + S2Point[] cellVertices = new S2Point[4]; + for (int i = 0; i < 4; ++i) { + cellVertices[i] = cell.getVertex(i); + } + for (int j = 0; j < 4; ++j) { + S2EdgeUtil.EdgeCrosser crosser = + new S2EdgeUtil.EdgeCrosser(cellVertices[j], cellVertices[(j + 1) & 3], vertex(0)); + for (int i = 1; i < numVertices(); ++i) { + if (crosser.robustCrossing(vertex(i)) >= 0) { + // There is a proper crossing, or two vertices were the same. + return true; + } + } + } + return false; + } + + /** + * Given a point, returns the index of the start point of the (first) edge on + * the polyline that is closest to the given point. The polyline must have at + * least one vertex. Throws IllegalStateException if this is not the case. + */ + public int getNearestEdgeIndex(S2Point point) { + Preconditions.checkState(numVertices() > 0, "Empty polyline"); + + if (numVertices() == 1) { + // If there is only one vertex, the "edge" is trivial, and it's the only one + return 0; + } + + // Initial value larger than any possible distance on the unit sphere. + S1Angle minDistance = S1Angle.radians(10); + int minIndex = -1; + + // Find the line segment in the polyline that is closest to the point given. 
+ for (int i = 0; i < numVertices() - 1; ++i) { + S1Angle distanceToSegment = S2EdgeUtil.getDistance(point, vertex(i), vertex(i + 1)); + if (distanceToSegment.lessThan(minDistance)) { + minDistance = distanceToSegment; + minIndex = i; + } + } + + return minIndex; + } + + /** + * Given a point p and the index of the start point of an edge of this polyline, + * returns the point on that edge that is closest to p. + */ + public S2Point projectToEdge(S2Point point, int index) { + Preconditions.checkState(numVertices() > 0, "Empty polyline"); + Preconditions.checkState(numVertices() == 1 || index < numVertices() - 1, "Invalid edge index"); + if (numVertices() == 1) { + // If there is only one vertex, it is always closest to any given point. + return vertex(0); + } + return S2EdgeUtil.getClosestPoint(point, vertex(index), vertex(index + 1)); + } + + @Override + public boolean equals(Object that) { + if (!(that instanceof S2Polyline)) { + return false; + } + + S2Polyline thatPolygon = (S2Polyline) that; + + if (numVertices != thatPolygon.numVertices) { + return false; + } + + for (int i = 0; i < vertices.length; i++) { + if (!vertices[i].equals(thatPolygon.vertices[i])) { + return false; + } + } + + return true; + } + + @Override + public int hashCode() { + return Objects.hashCode(numVertices, Arrays.deepHashCode(vertices)); + } + + + // Polylines do not have a Contains(S2Point) method, because "containment" + // is not numerically well-defined except at the polyline vertices. +} diff --git a/src/com/google/common/geometry/S2Projections.java b/src/com/google/common/geometry/S2Projections.java new file mode 100644 index 0000000..7032799 --- /dev/null +++ b/src/com/google/common/geometry/S2Projections.java @@ -0,0 +1,438 @@ +/* + * Copyright 2005 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.common.geometry; + +import com.google.common.geometry.S2.Metric; + +/** + * This class specifies the details of how the cube faces are projected onto the + * unit sphere. This includes getting the face ordering and orientation correct + * so that sequentially increasing cell ids follow a continuous space-filling + * curve over the entire sphere, and defining the transformation from cell-space + * to cube-space (see s2.h) in order to make the cells more uniform in size. + * + * + * We have implemented three different projections from cell-space (s,t) to + * cube-space (u,v): linear, quadratic, and tangent. They have the following + * tradeoffs: + * + * Linear - This is the fastest transformation, but also produces the least + * uniform cell sizes. Cell areas vary by a factor of about 5.2, with the + * largest cells at the center of each face and the smallest cells in the + * corners. + * + * Tangent - Transforming the coordinates via atan() makes the cell sizes more + * uniform. The areas vary by a maximum ratio of 1.4 as opposed to a maximum + * ratio of 5.2. However, each call to atan() is about as expensive as all of + * the other calculations combined when converting from points to cell ids, i.e. + * it reduces performance by a factor of 3. + * + * Quadratic - This is an approximation of the tangent projection that is much + * faster and produces cells that are almost as uniform in size. 
It is about 3 + * times faster than the tangent projection for converting cell ids to points, + * and 2 times faster for converting points to cell ids. Cell areas vary by a + * maximum ratio of about 2.1. + * + * Here is a table comparing the cell uniformity using each projection. "Area + * ratio" is the maximum ratio over all subdivision levels of the largest cell + * area to the smallest cell area at that level, "edge ratio" is the maximum + * ratio of the longest edge of any cell to the shortest edge of any cell at the + * same level, and "diag ratio" is the ratio of the longest diagonal of any cell + * to the shortest diagonal of any cell at the same level. "ToPoint" and + * "FromPoint" are the times in microseconds required to convert cell ids to and + * from points (unit vectors) respectively. + * + * Area Edge Diag ToPoint FromPoint Ratio Ratio Ratio (microseconds) + * ------------------------------------------------------- Linear: 5.200 2.117 + * 2.959 0.103 0.123 Tangent: 1.414 1.414 1.704 0.290 0.306 Quadratic: 2.082 + * 1.802 1.932 0.116 0.161 + * + * The worst-case cell aspect ratios are about the same with all three + * projections. The maximum ratio of the longest edge to the shortest edge + * within the same cell is about 1.4 and the maximum ratio of the diagonals + * within the same cell is about 1.7. + * + * This data was produced using s2cell_unittest and s2cellid_unittest. + * + */ + +public strictfp class S2Projections { + public enum Projections { + S2_LINEAR_PROJECTION, S2_TAN_PROJECTION, S2_QUADRATIC_PROJECTION + } + + private static final Projections S2_PROJECTION = Projections.S2_QUADRATIC_PROJECTION; + + // All of the values below were obtained by a combination of hand analysis and + // Mathematica. 
In general, S2_TAN_PROJECTION produces the most uniform + // shapes and sizes of cells, S2_LINEAR_PROJECTION is considerably worse, and + // S2_QUADRATIC_PROJECTION is somewhere in between (but generally closer to + // the tangent projection than the linear one). + + + // The minimum area of any cell at level k is at least MIN_AREA.GetValue(k), + // and the maximum is at most MAX_AREA.GetValue(k). The average area of all + // cells at level k is exactly AVG_AREA.GetValue(k). + public static final Metric MIN_AREA = new Metric(2, + S2_PROJECTION == Projections.S2_LINEAR_PROJECTION ? 1 / (3 * Math.sqrt(3)) : // 0.192 + S2_PROJECTION == Projections.S2_TAN_PROJECTION ? (S2.M_PI * S2.M_PI) + / (16 * S2.M_SQRT2) : // 0.436 + S2_PROJECTION == Projections.S2_QUADRATIC_PROJECTION + ? 2 * S2.M_SQRT2 / 9 : // 0.314 + 0); + public static final Metric MAX_AREA = new Metric(2, + S2_PROJECTION == Projections.S2_LINEAR_PROJECTION ? 1 : // 1.000 + S2_PROJECTION == Projections.S2_TAN_PROJECTION ? S2.M_PI * S2.M_PI / 16 : // 0.617 + S2_PROJECTION == Projections.S2_QUADRATIC_PROJECTION + ? 0.65894981424079037 : // 0.659 + 0); + public static final Metric AVG_AREA = new Metric(2, S2.M_PI / 6); // 0.524) + + + // Each cell is bounded by four planes passing through its four edges and + // the center of the sphere. These metrics relate to the angle between each + // pair of opposite bounding planes, or equivalently, between the planes + // corresponding to two different s-values or two different t-values. For + // example, the maximum angle between opposite bounding planes for a cell at + // level k is MAX_ANGLE_SPAN.GetValue(k), and the average angle span for all + // cells at level k is approximately AVG_ANGLE_SPAN.GetValue(k). + public static final Metric MIN_ANGLE_SPAN = new Metric(1, + S2_PROJECTION == Projections.S2_LINEAR_PROJECTION ? 0.5 : // 0.500 + S2_PROJECTION == Projections.S2_TAN_PROJECTION ? S2.M_PI / 4 : // 0.785 + S2_PROJECTION == Projections.S2_QUADRATIC_PROJECTION ? 2. 
/ 3 : // 0.667 + 0); + public static final Metric MAX_ANGLE_SPAN = new Metric(1, + S2_PROJECTION == Projections.S2_LINEAR_PROJECTION ? 1 : // 1.000 + S2_PROJECTION == Projections.S2_TAN_PROJECTION ? S2.M_PI / 4 : // 0.785 + S2_PROJECTION == Projections.S2_QUADRATIC_PROJECTION + ? 0.85244858959960922 : // 0.852 + 0); + public static final Metric AVG_ANGLE_SPAN = new Metric(1, S2.M_PI / 4); // 0.785 + + + // The width of geometric figure is defined as the distance between two + // parallel bounding lines in a given direction. For cells, the minimum + // width is always attained between two opposite edges, and the maximum + // width is attained between two opposite vertices. However, for our + // purposes we redefine the width of a cell as the perpendicular distance + // between a pair of opposite edges. A cell therefore has two widths, one + // in each direction. The minimum width according to this definition agrees + // with the classic geometric one, but the maximum width is different. (The + // maximum geometric width corresponds to MAX_DIAG defined below.) + // + // For a cell at level k, the distance between opposite edges is at least + // MIN_WIDTH.GetValue(k) and at most MAX_WIDTH.GetValue(k). The average + // width in both directions for all cells at level k is approximately + // AVG_WIDTH.GetValue(k). + // + // The width is useful for bounding the minimum or maximum distance from a + // point on one edge of a cell to the closest point on the opposite edge. + // For example, this is useful when "growing" regions by a fixed distance. + public static final Metric MIN_WIDTH = new Metric(1, + (S2Projections.S2_PROJECTION == Projections.S2_LINEAR_PROJECTION ? 1 / Math.sqrt(6) : // 0.408 + S2_PROJECTION == Projections.S2_TAN_PROJECTION ? S2.M_PI / (4 * S2.M_SQRT2) : // 0.555 + S2_PROJECTION == Projections.S2_QUADRATIC_PROJECTION ? 
S2.M_SQRT2 / 3 : // 0.471 + 0)); + + public static final Metric MAX_WIDTH = new Metric(1, MAX_ANGLE_SPAN.deriv()); + public static final Metric AVG_WIDTH = new Metric(1, + S2_PROJECTION == Projections.S2_LINEAR_PROJECTION ? 0.70572967292222848 : // 0.706 + S2_PROJECTION == Projections.S2_TAN_PROJECTION ? 0.71865931946258044 : // 0.719 + S2_PROJECTION == Projections.S2_QUADRATIC_PROJECTION + ? 0.71726183644304969 : // 0.717 + 0); + + // The minimum edge length of any cell at level k is at least + // MIN_EDGE.GetValue(k), and the maximum is at most MAX_EDGE.GetValue(k). + // The average edge length is approximately AVG_EDGE.GetValue(k). + // + // The edge length metrics can also be used to bound the minimum, maximum, + // or average distance from the center of one cell to the center of one of + // its edge neighbors. In particular, it can be used to bound the distance + // between adjacent cell centers along the space-filling Hilbert curve for + // cells at any given level. + public static final Metric MIN_EDGE = new Metric(1, + S2_PROJECTION == Projections.S2_LINEAR_PROJECTION ? S2.M_SQRT2 / 3 : // 0.471 + S2_PROJECTION == Projections.S2_TAN_PROJECTION ? S2.M_PI / (4 * S2.M_SQRT2) : // 0.555 + S2_PROJECTION == Projections.S2_QUADRATIC_PROJECTION ? S2.M_SQRT2 / 3 : // 0.471 + 0); + public static final Metric MAX_EDGE = new Metric(1, MAX_ANGLE_SPAN.deriv()); + public static final Metric AVG_EDGE = new Metric(1, + S2_PROJECTION == Projections.S2_LINEAR_PROJECTION ? 0.72001709647780182 : // 0.720 + S2_PROJECTION == Projections.S2_TAN_PROJECTION ? 0.73083351627336963 : // 0.731 + S2_PROJECTION == Projections.S2_QUADRATIC_PROJECTION + ? 0.72960687319305303 : // 0.730 + 0); + + + // The minimum diagonal length of any cell at level k is at least + // MIN_DIAG.GetValue(k), and the maximum is at most MAX_DIAG.GetValue(k). + // The average diagonal length is approximately AVG_DIAG.GetValue(k). 
+ // + // The maximum diagonal also happens to be the maximum diameter of any cell, + // and also the maximum geometric width (see the discussion above). So for + // example, the distance from an arbitrary point to the closest cell center + // at a given level is at most half the maximum diagonal length. + public static final Metric MIN_DIAG = new Metric(1, + S2_PROJECTION == Projections.S2_LINEAR_PROJECTION ? S2.M_SQRT2 / 3 : // 0.471 + S2_PROJECTION == Projections.S2_TAN_PROJECTION ? S2.M_PI / (3 * S2.M_SQRT2) : // 0.740 + S2_PROJECTION == Projections.S2_QUADRATIC_PROJECTION + ? 4 * S2.M_SQRT2 / 9 : // 0.629 + 0); + public static final Metric MAX_DIAG = new Metric(1, + S2_PROJECTION == Projections.S2_LINEAR_PROJECTION ? S2.M_SQRT2 : // 1.414 + S2_PROJECTION == Projections.S2_TAN_PROJECTION ? S2.M_PI / Math.sqrt(6) : // 1.283 + S2_PROJECTION == Projections.S2_QUADRATIC_PROJECTION + ? 1.2193272972170106 : // 1.219 + 0); + public static final Metric AVG_DIAG = new Metric(1, + S2_PROJECTION == Projections.S2_LINEAR_PROJECTION ? 1.0159089332094063 : // 1.016 + S2_PROJECTION == Projections.S2_TAN_PROJECTION ? 1.0318115985978178 : // 1.032 + S2_PROJECTION == Projections.S2_QUADRATIC_PROJECTION + ? 1.03021136949923584 : // 1.030 + 0); + + // This is the maximum edge aspect ratio over all cells at any level, where + // the edge aspect ratio of a cell is defined as the ratio of its longest + // edge length to its shortest edge length. + public static final double MAX_EDGE_ASPECT = + S2_PROJECTION == Projections.S2_LINEAR_PROJECTION ? S2.M_SQRT2 : // 1.414 + S2_PROJECTION == Projections.S2_TAN_PROJECTION ? S2.M_SQRT2 : // 1.414 + S2_PROJECTION == Projections.S2_QUADRATIC_PROJECTION ? 
1.44261527445268292 : // 1.443
+ 0;
+
+
+ // This is the maximum diagonal aspect ratio over all cells at any level,
+ // where the diagonal aspect ratio of a cell is defined as the ratio of its
+ // longest diagonal length to its shortest diagonal length.
+ public static final double MAX_DIAG_ASPECT = Math.sqrt(3); // 1.732
+
+
+ // IJ_TO_POS[orientation][ij] -> pos
+ //
+ // Given a cell orientation and the (i,j)-index of a subcell (0=(0,0),
+ // 1=(0,1), 2=(1,0), 3=(1,1)), return the order in which this subcell is
+ // visited by the Hilbert curve (a position in the range [0..3]).
+ public static final int IJ_TO_POS[][] = {
+ // (0,0) (0,1) (1,0) (1,1)
+ {0, 1, 3, 2}, // canonical order
+ {0, 3, 1, 2}, // axes swapped
+ {2, 3, 1, 0}, // bits inverted
+ {2, 1, 3, 0}, // swapped & inverted
+ };
+
+ public static double stToUV(double s) {
+ switch (S2_PROJECTION) {
+ case S2_LINEAR_PROJECTION:
+ return s;
+ case S2_TAN_PROJECTION:
+ // Unfortunately, tan(M_PI_4) is slightly less than 1.0. This isn't due
+ // to
+ // a flaw in the implementation of tan(), it's because the derivative of
+ // tan(x) at x=pi/4 is 2, and it happens that the two adjacent floating
+ // point numbers on either side of the infinite-precision value of pi/4
+ // have
+ // tangents that are slightly below and slightly above 1.0 when rounded
+ // to
+ // the nearest double-precision result.
+ s = Math.tan(S2.M_PI_4 * s);
+ return s + (1.0 / (1L << 53)) * s;
+ case S2_QUADRATIC_PROJECTION:
+ if (s >= 0) {
+ return (1 / 3.) * ((1 + s) * (1 + s) - 1);
+ } else {
+ return (1 / 3.)
* (1 - (1 - s) * (1 - s)); + } + default: + throw new IllegalStateException("Invalid value for S2_PROJECTION"); + } + } + + public static double uvToST(double u) { + switch (S2_PROJECTION) { + case S2_LINEAR_PROJECTION: + return u; + case S2_TAN_PROJECTION: + return (4 * S2.M_1_PI) * Math.atan(u); + case S2_QUADRATIC_PROJECTION: + if (u >= 0) { + return Math.sqrt(1 + 3 * u) - 1; + } else { + return 1 - Math.sqrt(1 - 3 * u); + } + default: + throw new IllegalStateException("Invalid value for S2_PROJECTION"); + } + } + + + /** + * Convert (face, u, v) coordinates to a direction vector (not necessarily + * unit length). + */ + public static S2Point faceUvToXyz(int face, double u, double v) { + switch (face) { + case 0: + return new S2Point(1, u, v); + case 1: + return new S2Point(-u, 1, v); + case 2: + return new S2Point(-u, -v, 1); + case 3: + return new S2Point(-1, -v, -u); + case 4: + return new S2Point(v, -1, -u); + default: + return new S2Point(v, u, -1); + } + } + + public static R2Vector validFaceXyzToUv(int face, S2Point p) { + // assert (p.dotProd(faceUvToXyz(face, 0, 0)) > 0); + double pu; + double pv; + switch (face) { + case 0: + pu = p.y / p.x; + pv = p.z / p.x; + break; + case 1: + pu = -p.x / p.y; + pv = p.z / p.y; + break; + case 2: + pu = -p.x / p.z; + pv = -p.y / p.z; + break; + case 3: + pu = p.z / p.x; + pv = p.y / p.x; + break; + case 4: + pu = p.z / p.y; + pv = -p.x / p.y; + break; + default: + pu = -p.y / p.z; + pv = -p.x / p.z; + break; + } + return new R2Vector(pu, pv); + } + + public static int xyzToFaceUV(S2Point p, R2Vector uv) { + int face = p.largestAbsComponent(); + if (p.get(face) < 0) { + face += 3; + } + R2Vector point = validFaceXyzToUv(face, p); + uv.x = point.x; + uv.y = point.y; + return face; + } + + public static boolean faceXyzToUv(int face, S2Point p, R2Vector uv) { + if (face < 3) { + if (p.get(face) <= 0) { + return false; + } + } else { + if (p.get(face - 3) >= 0) { + return false; + } + } + R2Vector point = 
validFaceXyzToUv(face, p); + uv.x = point.x; + uv.y = point.y; + return true; + } + + public static S2Point getUNorm(int face, double u) { + switch (face) { + case 0: + return new S2Point(u, -1, 0); + case 1: + return new S2Point(1, u, 0); + case 2: + return new S2Point(1, 0, u); + case 3: + return new S2Point(-u, 0, 1); + case 4: + return new S2Point(0, -u, 1); + default: + return new S2Point(0, -1, -u); + } + } + + public static S2Point getVNorm(int face, double v) { + switch (face) { + case 0: + return new S2Point(-v, 0, 1); + case 1: + return new S2Point(0, -v, 1); + case 2: + return new S2Point(0, -1, -v); + case 3: + return new S2Point(v, -1, 0); + case 4: + return new S2Point(1, v, 0); + default: + return new S2Point(1, 0, v); + } + } + + public static S2Point getNorm(int face) { + return faceUvToXyz(face, 0, 0); + } + + public static S2Point getUAxis(int face) { + switch (face) { + case 0: + return new S2Point(0, 1, 0); + case 1: + return new S2Point(-1, 0, 0); + case 2: + return new S2Point(-1, 0, 0); + case 3: + return new S2Point(0, 0, -1); + case 4: + return new S2Point(0, 0, -1); + default: + return new S2Point(0, 1, 0); + } + } + + public static S2Point getVAxis(int face) { + switch (face) { + case 0: + return new S2Point(0, 0, 1); + case 1: + return new S2Point(0, 0, 1); + case 2: + return new S2Point(0, -1, 0); + case 3: + return new S2Point(0, -1, 0); + case 4: + return new S2Point(1, 0, 0); + default: + return new S2Point(1, 0, 0); + } + } + + // Don't instantiate + private S2Projections() { + } +} diff --git a/src/com/google/common/geometry/S2Region.java b/src/com/google/common/geometry/S2Region.java new file mode 100644 index 0000000..9a0ee11 --- /dev/null +++ b/src/com/google/common/geometry/S2Region.java @@ -0,0 +1,51 @@ +/* + * Copyright 2005 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.common.geometry; + +/** + * An S2Region represents a two-dimensional region over the unit sphere. It is + * an abstract interface with various concrete subtypes. + * + * The main purpose of this interface is to allow complex regions to be + * approximated as simpler regions. So rather than having a wide variety of + * virtual methods that are implemented by all subtypes, the interface is + * restricted to methods that are useful for computing approximations. + * + * + */ +public interface S2Region { + + /** Return a bounding spherical cap. */ + public abstract S2Cap getCapBound(); + + + /** Return a bounding latitude-longitude rectangle. */ + public abstract S2LatLngRect getRectBound(); + + /** + * If this method returns true, the region completely contains the given cell. + * Otherwise, either the region does not contain the cell or the containment + * relationship could not be determined. + */ + public abstract boolean contains(S2Cell cell); + + /** + * If this method returns false, the region does not intersect the given cell. + * Otherwise, either region intersects the cell, or the intersection + * relationship could not be determined. + */ + public abstract boolean mayIntersect(S2Cell cell); +} diff --git a/src/com/google/common/geometry/S2RegionCoverer.java b/src/com/google/common/geometry/S2RegionCoverer.java new file mode 100644 index 0000000..14e62b1 --- /dev/null +++ b/src/com/google/common/geometry/S2RegionCoverer.java @@ -0,0 +1,548 @@ +/* + * Copyright 2005 Google Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.common.geometry; + +import com.google.common.base.Preconditions; + +import java.util.ArrayList; +import java.util.Comparator; +import java.util.HashSet; +import java.util.PriorityQueue; + +/** + * An S2RegionCoverer is a class that allows arbitrary regions to be + * approximated as unions of cells (S2CellUnion). This is useful for + * implementing various sorts of search and precomputation operations. + * + * Typical usage: {@code S2RegionCoverer coverer; coverer.setMaxCells(5); S2Cap + * cap = S2Cap.fromAxisAngle(...); S2CellUnion covering; + * coverer.getCovering(cap, covering); * } + * + * This yields a cell union of at most 5 cells that is guaranteed to cover the + * given cap (a disc-shaped region on the sphere). + * + * The approximation algorithm is not optimal but does a pretty good job in + * practice. The output does not always use the maximum number of cells allowed, + * both because this would not always yield a better approximation, and because + * max_cells() is a limit on how much work is done exploring the possible + * covering as well as a limit on the final output size. + * + * One can also generate interior coverings, which are sets of cells which are + * entirely contained within a region. Interior coverings can be empty, even for + * non-empty regions, if there are no cells that satisfy the provided + * constraints and are contained by the region. 
Note that for performance + * reasons, it is wise to specify a max_level when computing interior coverings + * - otherwise for regions with small or zero area, the algorithm may spend a + * lot of time subdividing cells all the way to leaf level to try to find + * contained cells. + * + * This class is thread-unsafe. Simultaneous calls to any of the getCovering + * methods will conflict and produce unpredictable results. + * + */ +public strictfp class S2RegionCoverer { + + /** + * By default, the covering uses at most 8 cells at any level. This gives a + * reasonable tradeoff between the number of cells used and the accuracy of + * the approximation (see table below). + */ + public static final int DEFAULT_MAX_CELLS = 8; + + private static final S2Cell[] FACE_CELLS = new S2Cell[6]; + static { + for (int face = 0; face < 6; ++face) { + FACE_CELLS[face] = S2Cell.fromFacePosLevel(face, (byte) 0, 0); + } + } + + + private int minLevel; + private int maxLevel; + private int levelMod; + private int maxCells; + + // True if we're computing an interior covering. + private boolean interiorCovering; + + // Counter of number of candidates created, for performance evaluation. + private int candidatesCreatedCounter; + + /** + * We save a temporary copy of the pointer passed to GetCovering() in order to + * avoid passing this parameter around internally. It is only used (and only + * valid) for the duration of a single GetCovering() call. + */ + S2Region region; + + /** + * A temporary variable used by GetCovering() that holds the cell ids that + * have been added to the covering so far. + */ + ArrayList<S2CellId> result; + + + static class Candidate { + private S2Cell cell; + private boolean isTerminal; // Cell should not be expanded further. + private int numChildren; // Number of children that intersect the region. + private Candidate[] children; // Actual size may be 0, 4, 16, or 64 + // elements. 
+ } + + static class QueueEntry { + private int id; + private Candidate candidate; + + public QueueEntry(int id, Candidate candidate) { + this.id = id; + this.candidate = candidate; + } + } + + /** + * We define our own comparison function on QueueEntries in order to make the + * results deterministic. Using the default less<QueueEntry>, entries of equal + * priority would be sorted according to the memory address of the candidate. + */ + static class QueueEntriesComparator implements Comparator<QueueEntry> { + @Override + public int compare(S2RegionCoverer.QueueEntry x, S2RegionCoverer.QueueEntry y) { + return x.id < y.id ? 1 : (x.id > y.id ? -1 : 0); + } + } + + + /** + * We keep the candidates in a priority queue. We specify a vector to hold the + * queue entries since for some reason priority_queue<> uses a deque by + * default. + */ + private PriorityQueue<QueueEntry> candidateQueue; + + /** + * Default constructor, sets all fields to default values. + */ + public S2RegionCoverer() { + minLevel = 0; + maxLevel = S2CellId.MAX_LEVEL; + levelMod = 1; + maxCells = DEFAULT_MAX_CELLS; + this.region = null; + result = new ArrayList<S2CellId>(); + // TODO(kirilll?): 10 is a completely random number, work out a better + // estimate + candidateQueue = new PriorityQueue<QueueEntry>(10, new QueueEntriesComparator()); + } + + // Set the minimum and maximum cell level to be used. The default is to use + // all cell levels. Requires: max_level() >= min_level(). + // + // To find the cell level corresponding to a given physical distance, use + // the S2Cell metrics defined in s2.h. For example, to find the cell + // level that corresponds to an average edge length of 10km, use: + // + // int level = S2::kAvgEdge.GetClosestLevel( + // geostore::S2Earth::KmToRadians(length_km)); + // + // Note: min_level() takes priority over max_cells(), i.e. cells below the + // given level will never be used even if this causes a large number of + // cells to be returned. 
+ + /** + * Sets the minimum level to be used. + */ + public void setMinLevel(int minLevel) { + // assert (minLevel >= 0 && minLevel <= S2CellId.MAX_LEVEL); + this.minLevel = Math.max(0, Math.min(S2CellId.MAX_LEVEL, minLevel)); + } + + /** + * Sets the maximum level to be used. + */ + public void setMaxLevel(int maxLevel) { + // assert (maxLevel >= 0 && maxLevel <= S2CellId.MAX_LEVEL); + this.maxLevel = Math.max(0, Math.min(S2CellId.MAX_LEVEL, maxLevel)); + } + + public int minLevel() { + return minLevel; + } + + public int maxLevel() { + return maxLevel; + } + + public int maxCells() { + return maxCells; + } + + /** + * If specified, then only cells where (level - min_level) is a multiple of + * "level_mod" will be used (default 1). This effectively allows the branching + * factor of the S2CellId hierarchy to be increased. Currently the only + * parameter values allowed are 1, 2, or 3, corresponding to branching factors + * of 4, 16, and 64 respectively. + */ + public void setLevelMod(int levelMod) { + // assert (levelMod >= 1 && levelMod <= 3); + this.levelMod = Math.max(1, Math.min(3, levelMod)); + } + + public int levelMod() { + return levelMod; + } + + + /** + * Sets the maximum desired number of cells in the approximation (defaults to + * kDefaultMaxCells). Note the following: + * + * <ul> + * <li>For any setting of max_cells(), up to 6 cells may be returned if that + * is the minimum number of cells required (e.g. if the region intersects all + * six face cells). Up to 3 cells may be returned even for very tiny convex + * regions if they happen to be located at the intersection of three cube + * faces. + * + * <li>For any setting of max_cells(), an arbitrary number of cells may be + * returned if min_level() is too high for the region being approximated. + * + * <li>If max_cells() is less than 4, the area of the covering may be + * arbitrarily large compared to the area of the original region even if the + * region is convex (e.g. an S2Cap or S2LatLngRect). 
+ * </ul> + * + * Accuracy is measured by dividing the area of the covering by the area of + * the original region. The following table shows the median and worst case + * values for this area ratio on a test case consisting of 100,000 spherical + * caps of random size (generated using s2regioncoverer_unittest): + * + * <pre> + * max_cells: 3 4 5 6 8 12 20 100 1000 + * median ratio: 5.33 3.32 2.73 2.34 1.98 1.66 1.42 1.11 1.01 + * worst case: 215518 14.41 9.72 5.26 3.91 2.75 1.92 1.20 1.02 + * </pre> + */ + public void setMaxCells(int maxCells) { + this.maxCells = maxCells; + } + + /** + * Computes a list of cell ids that covers the given region and satisfies the + * various restrictions specified above. + * + * @param region The region to cover + * @param covering The list filled in by this method + */ + public void getCovering(S2Region region, ArrayList<S2CellId> covering) { + // Rather than just returning the raw list of cell ids generated by + // GetCoveringInternal(), we construct a cell union and then denormalize it. + // This has the effect of replacing four child cells with their parent + // whenever this does not violate the covering parameters specified + // (min_level, level_mod, etc). This strategy significantly reduces the + // number of cells returned in many cases, and it is cheap compared to + // computing the covering in the first place. + + S2CellUnion tmp = getCovering(region); + tmp.denormalize(minLevel(), levelMod(), covering); + } + + /** + * Computes a list of cell ids that is contained within the given region and + * satisfies the various restrictions specified above. 
+ * + * @param region The region to fill + * @param interior The list filled in by this method + */ + public void getInteriorCovering(S2Region region, ArrayList<S2CellId> interior) { + S2CellUnion tmp = getInteriorCovering(region); + tmp.denormalize(minLevel(), levelMod(), interior); + } + + /** + * Return a normalized cell union that covers the given region and satisfies + * the restrictions *EXCEPT* for min_level() and level_mod(). These criteria + * cannot be satisfied using a cell union because cell unions are + * automatically normalized by replacing four child cells with their parent + * whenever possible. (Note that the list of cell ids passed to the cell union + * constructor does in fact satisfy all the given restrictions.) + */ + public S2CellUnion getCovering(S2Region region) { + S2CellUnion covering = new S2CellUnion(); + getCovering(region, covering); + return covering; + } + + public void getCovering(S2Region region, S2CellUnion covering) { + interiorCovering = false; + getCoveringInternal(region); + covering.initSwap(result); + } + + /** + * Return a normalized cell union that is contained within the given region + * and satisfies the restrictions *EXCEPT* for min_level() and level_mod(). + */ + public S2CellUnion getInteriorCovering(S2Region region) { + S2CellUnion covering = new S2CellUnion(); + getInteriorCovering(region, covering); + return covering; + } + + public void getInteriorCovering(S2Region region, S2CellUnion covering) { + interiorCovering = true; + getCoveringInternal(region); + covering.initSwap(result); + } + + /** + * Given a connected region and a starting point, return a set of cells at the + * given level that cover the region. 
+ */ + public static void getSimpleCovering( + S2Region region, S2Point start, int level, ArrayList<S2CellId> output) { + floodFill(region, S2CellId.fromPoint(start).parent(level), output); + } + + /** + * If the cell intersects the given region, return a new candidate with no + * children, otherwise return null. Also marks the candidate as "terminal" if + * it should not be expanded further. + */ + private Candidate newCandidate(S2Cell cell) { + if (!region.mayIntersect(cell)) { + return null; + } + + boolean isTerminal = false; + if (cell.level() >= minLevel) { + if (interiorCovering) { + if (region.contains(cell)) { + isTerminal = true; + } else if (cell.level() + levelMod > maxLevel) { + return null; + } + } else { + if (cell.level() + levelMod > maxLevel || region.contains(cell)) { + isTerminal = true; + } + } + } + Candidate candidate = new Candidate(); + candidate.cell = cell; + candidate.isTerminal = isTerminal; + if (!isTerminal) { + candidate.children = new Candidate[1 << maxChildrenShift()]; + } + candidatesCreatedCounter++; + return candidate; + } + + /** Return the log base 2 of the maximum number of children of a candidate. */ + private int maxChildrenShift() { + return 2 * levelMod; + } + + /** + * Process a candidate by either adding it to the result list or expanding its + * children and inserting it into the priority queue. Passing an argument of + * NULL does nothing. + */ + private void addCandidate(Candidate candidate) { + if (candidate == null) { + return; + } + + if (candidate.isTerminal) { + result.add(candidate.cell.id()); + return; + } + // assert (candidate.numChildren == 0); + + // Expand one level at a time until we hit min_level_ to ensure that + // we don't skip over it. + int numLevels = (candidate.cell.level() < minLevel) ? 
1 : levelMod; + int numTerminals = expandChildren(candidate, candidate.cell, numLevels); + + if (candidate.numChildren == 0) { + // Do nothing + } else if (!interiorCovering && numTerminals == 1 << maxChildrenShift() + && candidate.cell.level() >= minLevel) { + // Optimization: add the parent cell rather than all of its children. + // We can't do this for interior coverings, since the children just + // intersect the region, but may not be contained by it - we need to + // subdivide them further. + candidate.isTerminal = true; + addCandidate(candidate); + + } else { + // We negate the priority so that smaller absolute priorities are returned + // first. The heuristic is designed to refine the largest cells first, + // since those are where we have the largest potential gain. Among cells + // at the same level, we prefer the cells with the smallest number of + // intersecting children. Finally, we prefer cells that have the smallest + // number of children that cannot be refined any further. + int priority = -((((candidate.cell.level() << maxChildrenShift()) + candidate.numChildren) + << maxChildrenShift()) + numTerminals); + candidateQueue.add(new QueueEntry(priority, candidate)); + // logger.info("Push: " + candidate.cell.id() + " (" + priority + ") "); + } + } + + /** + * Populate the children of "candidate" by expanding the given number of + * levels from the given cell. Returns the number of children that were marked + * "terminal". 
+ */ + private int expandChildren(Candidate candidate, S2Cell cell, int numLevels) { + numLevels--; + S2Cell[] childCells = new S2Cell[4]; + for (int i = 0; i < 4; ++i) { + childCells[i] = new S2Cell(); + } + cell.subdivide(childCells); + int numTerminals = 0; + for (int i = 0; i < 4; ++i) { + if (numLevels > 0) { + if (region.mayIntersect(childCells[i])) { + numTerminals += expandChildren(candidate, childCells[i], numLevels); + } + continue; + } + Candidate child = newCandidate(childCells[i]); + if (child != null) { + candidate.children[candidate.numChildren++] = child; + if (child.isTerminal) { + ++numTerminals; + } + } + } + return numTerminals; + } + + /** Computes a set of initial candidates that cover the given region. */ + private void getInitialCandidates() { + // Optimization: if at least 4 cells are desired (the normal case), + // start with a 4-cell covering of the region's bounding cap. This + // lets us skip quite a few levels of refinement when the region to + // be covered is relatively small. + if (maxCells >= 4) { + // Find the maximum level such that the bounding cap contains at most one + // cell vertex at that level. + S2Cap cap = region.getCapBound(); + int level = Math.min(S2Projections.MIN_WIDTH.getMaxLevel(2 * cap.angle().radians()), + Math.min(maxLevel(), S2CellId.MAX_LEVEL - 1)); + if (levelMod() > 1 && level > minLevel()) { + level -= (level - minLevel()) % levelMod(); + } + // We don't bother trying to optimize the level == 0 case, since more than + // four face cells may be required. + if (level > 0) { + // Find the leaf cell containing the cap axis, and determine which + // subcell of the parent cell contains it. + ArrayList<S2CellId> base = new ArrayList<S2CellId>(4); + S2CellId id = S2CellId.fromPoint(cap.axis()); + id.getVertexNeighbors(level, base); + for (int i = 0; i < base.size(); ++i) { + addCandidate(newCandidate(new S2Cell(base.get(i)))); + } + return; + } + } + // Default: start with all six cube faces. 
+ for (int face = 0; face < 6; ++face) { + addCandidate(newCandidate(FACE_CELLS[face])); + } + } + + /** Generates a covering and stores it in result. */ + private void getCoveringInternal(S2Region region) { + // Strategy: Start with the 6 faces of the cube. Discard any + // that do not intersect the shape. Then repeatedly choose the + // largest cell that intersects the shape and subdivide it. + // + // result contains the cells that will be part of the output, while the + // priority queue contains cells that we may still subdivide further. Cells + // that are entirely contained within the region are immediately added to + // the output, while cells that do not intersect the region are immediately + // discarded. + // Therefore pq_ only contains cells that partially intersect the region. + // Candidates are prioritized first according to cell size (larger cells + // first), then by the number of intersecting children they have (fewest + // children first), and then by the number of fully contained children + // (fewest children first). + + Preconditions.checkState(candidateQueue.isEmpty() && result.isEmpty()); + + this.region = region; + candidatesCreatedCounter = 0; + + getInitialCandidates(); + while (!candidateQueue.isEmpty() && (!interiorCovering || result.size() < maxCells)) { + Candidate candidate = candidateQueue.poll().candidate; + // logger.info("Pop: " + candidate.cell.id()); + if (candidate.cell.level() < minLevel || candidate.numChildren == 1 + || result.size() + (interiorCovering ? 0 : candidateQueue.size()) + candidate.numChildren + <= maxCells) { + // Expand this candidate into its children. 
+ for (int i = 0; i < candidate.numChildren; ++i) { + addCandidate(candidate.children[i]); + } + } else if (interiorCovering) { + // Do nothing + } else { + candidate.isTerminal = true; + addCandidate(candidate); + } + } + + candidateQueue.clear(); + this.region = null; + } + + /** + * Given a region and a starting cell, return the set of all the + * edge-connected cells at the same level that intersect "region". The output + * cells are returned in arbitrary order. + */ + private static void floodFill(S2Region region, S2CellId start, ArrayList<S2CellId> output) { + HashSet<S2CellId> all = new HashSet<S2CellId>(); + ArrayList<S2CellId> frontier = new ArrayList<S2CellId>(); + output.clear(); + all.add(start); + frontier.add(start); + while (!frontier.isEmpty()) { + S2CellId id = frontier.get(frontier.size() - 1); + frontier.remove(frontier.size() - 1); + if (!region.mayIntersect(new S2Cell(id))) { + continue; + } + output.add(id); + + S2CellId[] neighbors = new S2CellId[4]; + id.getEdgeNeighbors(neighbors); + for (int edge = 0; edge < 4; ++edge) { + S2CellId nbr = neighbors[edge]; + boolean hasNbr = all.contains(nbr); + if (!all.contains(nbr)) { + frontier.add(nbr); + all.add(nbr); + } + } + } + } +} diff --git a/tests/com/google/common/geometry/DoubleMathTest.java b/tests/com/google/common/geometry/DoubleMathTest.java new file mode 100644 index 0000000..0443426 --- /dev/null +++ b/tests/com/google/common/geometry/DoubleMathTest.java @@ -0,0 +1,69 @@ +/* + * Copyright 2005 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.common.geometry; + +import com.google.common.geometry.DoubleMath.MantissaExponent; + +import junit.framework.TestCase; + +import java.util.Random; + +/** + * Tests the advanced floating point operations defined in DoubleMath.java. + * + */ +public class DoubleMathTest extends TestCase { + + Random rnd = new Random(12345); + + public void testFrexp() { + for (int i = 0; i < 10; ++i) { + MantissaExponent me = DoubleMath.frexp(Math.pow(2, i)); + assertEquals(0.5, me.mantissa); + assertEquals(i + 1, me.exp); + } + + for (int i = 0; i < 10; ++i) { + MantissaExponent me = DoubleMath.frexp(-Math.pow(2, i)); + assertEquals(-0.5, me.mantissa); + assertEquals(i + 1, me.exp); + } + + MantissaExponent me = DoubleMath.frexp(0); + assertEquals(0.0, me.mantissa); + assertEquals(0, me.exp); + + me = DoubleMath.frexp(3); + assertEquals(0.75, me.mantissa); + assertEquals(2, me.exp); + + me = DoubleMath.frexp(5); + assertEquals(0.625, me.mantissa); + assertEquals(3, me.exp); + } + + public void testCompareTo() { + for (int i = 0; i < 100; ++i) { + double x = rnd.nextDouble() * 100 - 50; + double y = rnd.nextDouble() * 100 - 50; + MantissaExponent m1 = DoubleMath.frexp(x); + MantissaExponent m2 = DoubleMath.frexp(y); + + assertEquals(new Double(x).compareTo(y), m1.compareTo(m2)); + } + } +} diff --git a/tests/com/google/common/geometry/GeometryTestCase.java b/tests/com/google/common/geometry/GeometryTestCase.java new file mode 100644 index 0000000..cb0719c --- /dev/null +++ b/tests/com/google/common/geometry/GeometryTestCase.java @@ -0,0 +1,211 @@ +/* + * Copyright 2005 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.common.geometry; + +import com.google.common.base.Splitter; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; + +import junit.framework.TestCase; + +import java.util.List; +import java.util.Random; + +public strictfp class GeometryTestCase extends TestCase { + + public Random rand; + + @Override + protected void setUp() { + rand = new Random(123456); + } + + public void assertDoubleNear(double a, double b) { + assertDoubleNear(a, b, 1e-9); + } + + public void assertDoubleNear(double a, double b, double error) { + assertTrue(a + error > b); + assertTrue(a < b + error); + } + + // maybe these should be put in a special testing util class + /** Return a random unit-length vector. */ + public S2Point randomPoint() { + return S2Point.normalize(new S2Point( + 2 * rand.nextDouble() - 1, + 2 * rand.nextDouble() - 1, + 2 * rand.nextDouble() - 1)); + } + + /** + * Return a right-handed coordinate frame (three orthonormal vectors). Returns + * an array of three points: x,y,z + */ + public ImmutableList<S2Point> getRandomFrame() { + S2Point p0 = randomPoint(); + S2Point p1 = S2Point.normalize(S2Point.crossProd(p0, randomPoint())); + S2Point p2 = S2Point.normalize(S2Point.crossProd(p0, p1)); + return ImmutableList.of(p0, p1, p2); + } + + /** + * Return a random cell id at the given level or at a randomly chosen level. + * The distribution is uniform over the space of cell ids, but only + * approximately uniform over the surface of the sphere. 
+ */ + public S2CellId getRandomCellId(int level) { + int face = random(S2CellId.NUM_FACES); + long pos = rand.nextLong() & ((1L << (2 * S2CellId.MAX_LEVEL)) - 1); + return S2CellId.fromFacePosLevel(face, pos, level); + } + + public S2CellId getRandomCellId() { + return getRandomCellId(random(S2CellId.MAX_LEVEL + 1)); + } + + int random(int n) { + if (n == 0) { + return 0; + } + return rand.nextInt(n); + } + + + // Pick "base" uniformly from range [0,maxLog] and then return + // "base" random bits. The effect is to pick a number in the range + // [0,2^maxLog-1] with bias towards smaller numbers. + int skewed(int maxLog) { + final int base = Math.abs(rand.nextInt()) % (maxLog + 1); + // if (!base) return 0; // if 0==base, we & with 0 below. + // + // this distribution differs slightly from ACMRandom's Skewed, + // since 0 occurs approximately 3 times more than 1 here, and + // ACMRandom's Skewed never outputs 0. + return rand.nextInt() & ((1 << base) - 1); + } + + /** + * Checks that "covering" completely covers the given region. If "check_tight" + * is true, also checks that it does not contain any cells that do not + * intersect the given region. ("id" is only used internally.) + */ + void checkCovering(S2Region region, S2CellUnion covering, boolean checkTight, S2CellId id) { + if (!id.isValid()) { + for (int face = 0; face < 6; ++face) { + checkCovering(region, covering, checkTight, S2CellId.fromFacePosLevel(face, 0, 0)); + } + return; + } + + if (!region.mayIntersect(new S2Cell(id))) { + // If region does not intersect id, then neither should the covering. + if (checkTight) { + assertTrue(!covering.intersects(id)); + } + + } else if (!covering.contains(id)) { + // The region may intersect id, but we can't assert that the covering + // intersects id because we may discover that the region does not actually + // intersect upon further subdivision. (MayIntersect is not exact.) 
+ assertTrue(!region.contains(new S2Cell(id))); + assertTrue(!id.isLeaf()); + S2CellId end = id.childEnd(); + for (S2CellId child = id.childBegin(); !child.equals(end); child = child.next()) { + checkCovering(region, covering, checkTight, child); + } + } + } + + S2Cap getRandomCap(double minArea, double maxArea) { + double capArea = maxArea + * Math.pow(minArea / maxArea, rand.nextDouble()); + assertTrue(capArea >= minArea && capArea <= maxArea); + + // The surface area of a cap is 2*Pi times its height. + return S2Cap.fromAxisArea(randomPoint(), capArea); + } + + S2Point samplePoint(S2Cap cap) { + // We consider the cap axis to be the "z" axis. We choose two other axes to + // complete the coordinate frame. + + S2Point z = cap.axis(); + S2Point x = z.ortho(); + S2Point y = S2Point.crossProd(z, x); + + // The surface area of a spherical cap is directly proportional to its + // height. First we choose a random height, and then we choose a random + // point along the circle at that height. + + double h = rand.nextDouble() * cap.height(); + double theta = 2 * S2.M_PI * rand.nextDouble(); + double r = Math.sqrt(h * (2 - h)); // Radius of circle. + + // (cos(theta)*r*x + sin(theta)*r*y + (1-h)*z).Normalize() + return S2Point.normalize(S2Point.add( + S2Point.add(S2Point.mul(x, Math.cos(theta) * r), S2Point.mul(y, Math.sin(theta) * r)), + S2Point.mul(z, (1 - h)))); + } + + static void parseVertices(String str, List<S2Point> vertices) { + if (str == null) { + return; + } + + for (String token : Splitter.on(',').split(str)) { + int colon = token.indexOf(':'); + if (colon == -1) { + throw new IllegalArgumentException( + "Illegal string:" + token + ". 
Should look like '35:20'"); + } + double lat = Double.parseDouble(token.substring(0, colon)); + double lng = Double.parseDouble(token.substring(colon + 1)); + vertices.add(S2LatLng.fromDegrees(lat, lng).toPoint()); + } + } + + static S2Point makePoint(String str) { + List<S2Point> vertices = Lists.newArrayList(); + parseVertices(str, vertices); + return Iterables.getOnlyElement(vertices); + } + + static S2Loop makeLoop(String str) { + List<S2Point> vertices = Lists.newArrayList(); + parseVertices(str, vertices); + return new S2Loop(vertices); + } + + static S2Polygon makePolygon(String str) { + List<S2Loop> loops = Lists.newArrayList(); + + for (String token : Splitter.on(';').omitEmptyStrings().split(str)) { + S2Loop loop = makeLoop(token); + loop.normalize(); + loops.add(loop); + } + + return new S2Polygon(loops); + } + + static S2Polyline makePolyline(String str) { + List<S2Point> vertices = Lists.newArrayList(); + parseVertices(str, vertices); + return new S2Polyline(vertices); + } +} diff --git a/tests/com/google/common/geometry/R1IntervalTest.java b/tests/com/google/common/geometry/R1IntervalTest.java new file mode 100644 index 0000000..0fe87ea --- /dev/null +++ b/tests/com/google/common/geometry/R1IntervalTest.java @@ -0,0 +1,115 @@ +/* + * Copyright 2005 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.common.geometry; + +public strictfp class R1IntervalTest extends GeometryTestCase { + + + /** + * Test all of the interval operations on the given pair of intervals. + * "expected_relation" is a sequence of "T" and "F" characters corresponding + * to the expected results of contains(), interiorContains(), Intersects(), + * and InteriorIntersects() respectively. + */ + public void testIntervalOps(R1Interval x, R1Interval y, String expectedRelation) { + assertEquals(x.contains(y), expectedRelation.charAt(0) == 'T'); + assertEquals(x.interiorContains(y), expectedRelation.charAt(1) == 'T'); + assertEquals(x.intersects(y), expectedRelation.charAt(2) == 'T'); + assertEquals(x.interiorIntersects(y), expectedRelation.charAt(3) == 'T'); + + assertEquals(x.contains(y), x.union(y).equals(x)); + assertEquals(x.intersects(y), !x.intersection(y).isEmpty()); + } + + public void testBasic() { + // Constructors and accessors. + R1Interval unit = new R1Interval(0, 1); + R1Interval negunit = new R1Interval(-1, 0); + assertEquals(unit.lo(), 0.0); + assertEquals(unit.hi(), 1.0); + assertEquals(negunit.bound(0), -1.0); + assertEquals(negunit.bound(1), 0.0); + R1Interval ten = new R1Interval(0, 0); + ten.setHi(10); + assertEquals(ten.hi(), 10.0); + + // is_empty() + R1Interval half = new R1Interval(0.5, 0.5); + assertTrue(!unit.isEmpty()); + assertTrue(!half.isEmpty()); + R1Interval empty = R1Interval.empty(); + assertTrue(empty.isEmpty()); + + // GetCenter(), GetLength() + assertEquals(unit.getCenter(), 0.5); + assertEquals(half.getCenter(), 0.5); + assertEquals(negunit.getLength(), 1.0); + assertEquals(half.getLength(), 0.0); + assertTrue(empty.getLength() < 0); + + // contains(double), interiorContains(double) + assertTrue(unit.contains(0.5)); + assertTrue(unit.interiorContains(0.5)); + assertTrue(unit.contains(0)); + assertTrue(!unit.interiorContains(0)); + assertTrue(unit.contains(1)); + assertTrue(!unit.interiorContains(1)); + + // contains(R1Interval), 
interiorContains(R1Interval) + // Intersects(R1Interval), InteriorIntersects(R1Interval) + testIntervalOps(empty, empty, "TTFF"); + testIntervalOps(empty, unit, "FFFF"); + testIntervalOps(unit, half, "TTTT"); + testIntervalOps(unit, unit, "TFTT"); + testIntervalOps(unit, empty, "TTFF"); + testIntervalOps(unit, negunit, "FFTF"); + testIntervalOps(unit, new R1Interval(0, 0.5), "TFTT"); + testIntervalOps(half, new R1Interval(0, 0.5), "FFTF"); + + // addPoint() + R1Interval r; + r = empty.addPoint(5); + assertTrue(r.lo() == 5.0 && r.hi() == 5.0); + r = r.addPoint(-1); + assertTrue(r.lo() == -1.0 && r.hi() == 5.0); + r = r.addPoint(0); + assertTrue(r.lo() == -1.0 && r.hi() == 5.0); + + // fromPointPair() + assertEquals(R1Interval.fromPointPair(4, 4), new R1Interval(4, 4)); + assertEquals(R1Interval.fromPointPair(-1, -2), new R1Interval(-2, -1)); + assertEquals(R1Interval.fromPointPair(-5, 3), new R1Interval(-5, 3)); + + // expanded() + assertEquals(empty.expanded(0.45), empty); + assertEquals(unit.expanded(0.5), new R1Interval(-0.5, 1.5)); + + // union(), intersection() + assertTrue(new R1Interval(99, 100).union(empty).equals(new R1Interval(99, 100))); + assertTrue(empty.union(new R1Interval(99, 100)).equals(new R1Interval(99, 100))); + assertTrue(new R1Interval(5, 3).union(new R1Interval(0, -2)).isEmpty()); + assertTrue(new R1Interval(0, -2).union(new R1Interval(5, 3)).isEmpty()); + assertTrue(unit.union(unit).equals(unit)); + assertTrue(unit.union(negunit).equals(new R1Interval(-1, 1))); + assertTrue(negunit.union(unit).equals(new R1Interval(-1, 1))); + assertTrue(half.union(unit).equals(unit)); + assertTrue(unit.intersection(half).equals(half)); + assertTrue(unit.intersection(negunit).equals(new R1Interval(0, 0))); + assertTrue(negunit.intersection(half).isEmpty()); + assertTrue(unit.intersection(empty).isEmpty()); + assertTrue(empty.intersection(unit).isEmpty()); + } +} diff --git a/tests/com/google/common/geometry/S1AngleTest.java 
b/tests/com/google/common/geometry/S1AngleTest.java new file mode 100644 index 0000000..ad97e59 --- /dev/null +++ b/tests/com/google/common/geometry/S1AngleTest.java @@ -0,0 +1,44 @@ +/* + * Copyright 2005 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.common.geometry; + +import junit.framework.TestCase; + +public strictfp class S1AngleTest extends TestCase { + + + public void testBasic() { + // Check that the conversion between Pi radians and 180 degrees is exact. + assertEquals(S1Angle.radians(Math.PI).radians(), Math.PI); + assertEquals(S1Angle.radians(Math.PI).degrees(), 180.0); + assertEquals(S1Angle.degrees(180).radians(), Math.PI); + assertEquals(S1Angle.degrees(180).degrees(), 180.0); + + assertEquals(S1Angle.radians(Math.PI / 2).degrees(), 90.0); + + // Check negative angles. + assertEquals(S1Angle.radians(-Math.PI / 2).degrees(), -90.0); + assertEquals(S1Angle.degrees(-45).radians(), -Math.PI / 4); + + // Check that E5/E6/E7 representations work as expected. 
+ assertEquals(S1Angle.e5(2000000), S1Angle.degrees(20)); + assertEquals(S1Angle.e6(-60000000), S1Angle.degrees(-60)); + assertEquals(S1Angle.e7(750000000), S1Angle.degrees(75)); + assertEquals(S1Angle.degrees(12.34567).e5(), 1234567); + assertEquals(S1Angle.degrees(12.345678).e6(), 12345678); + assertEquals(S1Angle.degrees(-12.3456789).e7(), -123456789); + } +} diff --git a/tests/com/google/common/geometry/S1IntervalTest.java b/tests/com/google/common/geometry/S1IntervalTest.java new file mode 100644 index 0000000..146ec74 --- /dev/null +++ b/tests/com/google/common/geometry/S1IntervalTest.java @@ -0,0 +1,328 @@ +/* + * Copyright 2005 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.common.geometry; + +import java.util.Arrays; + +public strictfp class S1IntervalTest extends GeometryTestCase { + + private void testIntervalOps(S1Interval x, S1Interval y, String expectedRelation, + S1Interval expectedUnion, S1Interval expectedIntersection) { + // Test all of the interval operations on the given pair of intervals. + // "expected_relation" is a sequence of "T" and "F" characters corresponding + // to the expected results of Contains(), InteriorContains(), Intersects(), + // and InteriorIntersects() respectively. 
+ + assertEquals(x.contains(y), expectedRelation.charAt(0) == 'T'); + assertEquals(x.interiorContains(y), expectedRelation.charAt(1) == 'T'); + assertEquals(x.intersects(y), expectedRelation.charAt(2) == 'T'); + assertEquals(x.interiorIntersects(y), expectedRelation.charAt(3) == 'T'); + + // bounds() returns a const reference to a member variable, so we need to + // make a copy when invoking it on a temporary object. + assertTrue(Arrays.equals(x.union(y).bounds(), expectedUnion.bounds())); + assertTrue(Arrays.equals(x.intersection(y).bounds(), expectedIntersection.bounds())); + + assertEquals(x.contains(y), x.union(y) == x); + assertEquals(x.intersects(y), !x.intersection(y).isEmpty()); + + if (y.lo() == y.hi()) { + S1Interval r = x.addPoint(y.lo()); + assertTrue(Arrays.equals(r.bounds(), expectedUnion.bounds())); + } + } + + public void testBasic() { + // "Quadrants" are numbered as follows: + // quad1 == [0, Pi/2] + // quad2 == [Pi/2, Pi] + // quad3 == [-Pi, -Pi/2] + // quad4 == [-Pi/2, 0] + + // Constructors and accessors. 
+ S1Interval quad12 = new S1Interval(0, -S2.M_PI); + assertEquals(quad12.lo(), 0.0); + assertEquals(quad12.hi(), S2.M_PI); + S1Interval quad34 = new S1Interval(-S2.M_PI, 0); + assertEquals(quad34.bound(0), S2.M_PI); + assertEquals(quad34.bound(1), 0.0); + S1Interval pi = new S1Interval(S2.M_PI, S2.M_PI); + assertEquals(pi.lo(), S2.M_PI); + assertEquals(pi.hi(), S2.M_PI); + S1Interval mipi = new S1Interval(-S2.M_PI, -S2.M_PI); + assertEquals(mipi.lo(), S2.M_PI); + assertEquals(mipi.hi(), S2.M_PI); + S1Interval quad23 = new S1Interval(S2.M_PI_2, -S2.M_PI_2); // inverted + assertEquals(quad23.lo(), S2.M_PI_2); + assertEquals(quad23.hi(), -S2.M_PI_2); + S1Interval quad1 = new S1Interval(0, 0); + quad1.setHi(S2.M_PI_2); + assertEquals(quad1.hi(), S2.M_PI_2); + + // is_valid(), is_empty(), is_inverted() + S1Interval zero = new S1Interval(0, 0); + assertTrue(zero.isValid() && !zero.isEmpty() && !zero.isFull()); + S1Interval empty = S1Interval.empty(); + assertTrue(empty.isValid() && empty.isEmpty() && !empty.isFull()); + assertTrue(empty.isInverted()); + S1Interval full = S1Interval.full(); + assertTrue(full.isValid() && !full.isEmpty() && full.isFull()); + assertTrue(!quad12.isEmpty() && !quad12.isFull() && !quad12.isInverted()); + assertTrue(!quad23.isEmpty() && !quad23.isFull() && quad23.isInverted()); + assertTrue(pi.isValid() && !pi.isEmpty() && !pi.isInverted()); + assertTrue(mipi.isValid() && !mipi.isEmpty() && !mipi.isInverted()); + + // GetCenter(), GetLength() + assertEquals(quad12.getCenter(), S2.M_PI_2); + assertEquals(quad12.getLength(), S2.M_PI); + assertDoubleNear(new S1Interval(3.1, 2.9).getCenter(), 3.0 - S2.M_PI); + assertDoubleNear(new S1Interval(-2.9, -3.1).getCenter(), S2.M_PI - 3.0); + assertDoubleNear(new S1Interval(2.1, -2.1).getCenter(), S2.M_PI); + assertEquals(pi.getCenter(), S2.M_PI); + assertEquals(pi.getLength(), 0.0); + assertEquals(mipi.getCenter(), S2.M_PI); + assertEquals(mipi.getLength(), 0.0); + 
assertEquals(Math.abs(quad23.getCenter()), S2.M_PI); + assertEquals(Math.abs(quad23.getLength()), S2.M_PI); + S1Interval quad123 = new S1Interval(0, -S2.M_PI_2); + assertDoubleNear(quad123.getCenter(), 0.75 * S2.M_PI); + assertDoubleNear(quad123.getLength(), 1.5 * S2.M_PI); + assertTrue(empty.getLength() < 0); + assertEquals(full.getLength(), 2 * S2.M_PI); + + // Complement() + assertTrue(empty.complement().isFull()); + assertTrue(full.complement().isEmpty()); + assertTrue(pi.complement().isFull()); + assertTrue(mipi.complement().isFull()); + assertTrue(zero.complement().isFull()); + assertTrue(quad12.complement().approxEquals(quad34)); + assertTrue(quad34.complement().approxEquals(quad12)); + S1Interval quad4 = new S1Interval(-S2.M_PI_2, 0); + assertTrue(quad123.complement().approxEquals(quad4)); + S1Interval quad234 = new S1Interval(S2.M_PI_2, 0); + + // Contains(double), InteriorContains(double) + assertTrue(!empty.contains(0) && !empty.contains(S2.M_PI) && !empty.contains(-S2.M_PI)); + assertTrue(!empty.interiorContains(S2.M_PI) && !empty.interiorContains(-S2.M_PI)); + assertTrue(full.contains(0) && full.contains(S2.M_PI) && full.contains(-S2.M_PI)); + assertTrue(full.interiorContains(S2.M_PI) && full.interiorContains(-S2.M_PI)); + assertTrue(quad12.contains(0) && quad12.contains(S2.M_PI) && quad12.contains(-S2.M_PI)); + assertTrue(quad12.interiorContains(S2.M_PI_2) && !quad12.interiorContains(0)); + assertTrue(!quad12.interiorContains(S2.M_PI) && !quad12.interiorContains(-S2.M_PI)); + assertTrue(quad23.contains(S2.M_PI_2) && quad23.contains(-S2.M_PI_2)); + assertTrue(quad23.contains(S2.M_PI) && quad23.contains(-S2.M_PI)); + assertTrue(!quad23.contains(0)); + assertTrue(!quad23.interiorContains(S2.M_PI_2) && !quad23.interiorContains(-S2.M_PI_2)); + assertTrue(quad23.interiorContains(S2.M_PI) && quad23.interiorContains(-S2.M_PI)); + assertTrue(!quad23.interiorContains(0)); + assertTrue(pi.contains(S2.M_PI) && pi.contains(-S2.M_PI) && !pi.contains(0)); + 
assertTrue(!pi.interiorContains(S2.M_PI) && !pi.interiorContains(-S2.M_PI)); + assertTrue(mipi.contains(S2.M_PI) && mipi.contains(-S2.M_PI) && !mipi.contains(0)); + assertTrue(!mipi.interiorContains(S2.M_PI) && !mipi.interiorContains(-S2.M_PI)); + assertTrue(zero.contains(0) && !zero.interiorContains(0)); + + // Contains(S1Interval), InteriorContains(S1Interval), + // Intersects(), InteriorIntersects(), Union(), Intersection() + S1Interval quad2 = new S1Interval(S2.M_PI_2, -S2.M_PI); + S1Interval quad3 = new S1Interval(S2.M_PI, -S2.M_PI_2); + S1Interval pi2 = new S1Interval(S2.M_PI_2, S2.M_PI_2); + S1Interval mipi2 = new S1Interval(-S2.M_PI_2, -S2.M_PI_2); + + testIntervalOps(empty, empty, "TTFF", empty, empty); + testIntervalOps(empty, full, "FFFF", full, empty); + testIntervalOps(empty, zero, "FFFF", zero, empty); + testIntervalOps(empty, pi, "FFFF", pi, empty); + testIntervalOps(empty, mipi, "FFFF", mipi, empty); + + testIntervalOps(full, empty, "TTFF", full, empty); + testIntervalOps(full, full, "TTTT", full, full); + testIntervalOps(full, zero, "TTTT", full, zero); + testIntervalOps(full, pi, "TTTT", full, pi); + testIntervalOps(full, mipi, "TTTT", full, mipi); + testIntervalOps(full, quad12, "TTTT", full, quad12); + testIntervalOps(full, quad23, "TTTT", full, quad23); + + testIntervalOps(zero, empty, "TTFF", zero, empty); + testIntervalOps(zero, full, "FFTF", full, zero); + testIntervalOps(zero, zero, "TFTF", zero, zero); + testIntervalOps(zero, pi, "FFFF", new S1Interval(0, S2.M_PI), empty); + testIntervalOps(zero, pi2, "FFFF", quad1, empty); + testIntervalOps(zero, mipi, "FFFF", quad12, empty); + testIntervalOps(zero, mipi2, "FFFF", quad4, empty); + testIntervalOps(zero, quad12, "FFTF", quad12, zero); + testIntervalOps(zero, quad23, "FFFF", quad123, empty); + + testIntervalOps(pi2, empty, "TTFF", pi2, empty); + testIntervalOps(pi2, full, "FFTF", full, pi2); + testIntervalOps(pi2, zero, "FFFF", quad1, empty); + testIntervalOps(pi2, pi, "FFFF", new 
S1Interval(S2.M_PI_2, S2.M_PI), empty); + testIntervalOps(pi2, pi2, "TFTF", pi2, pi2); + testIntervalOps(pi2, mipi, "FFFF", quad2, empty); + testIntervalOps(pi2, mipi2, "FFFF", quad23, empty); + testIntervalOps(pi2, quad12, "FFTF", quad12, pi2); + testIntervalOps(pi2, quad23, "FFTF", quad23, pi2); + + testIntervalOps(pi, empty, "TTFF", pi, empty); + testIntervalOps(pi, full, "FFTF", full, pi); + testIntervalOps(pi, zero, "FFFF", new S1Interval(S2.M_PI, 0), empty); + testIntervalOps(pi, pi, "TFTF", pi, pi); + testIntervalOps(pi, pi2, "FFFF", new S1Interval(S2.M_PI_2, S2.M_PI), empty); + testIntervalOps(pi, mipi, "TFTF", pi, pi); + testIntervalOps(pi, mipi2, "FFFF", quad3, empty); + testIntervalOps(pi, quad12, "FFTF", new S1Interval(0, S2.M_PI), pi); + testIntervalOps(pi, quad23, "FFTF", quad23, pi); + + testIntervalOps(mipi, empty, "TTFF", mipi, empty); + testIntervalOps(mipi, full, "FFTF", full, mipi); + testIntervalOps(mipi, zero, "FFFF", quad34, empty); + testIntervalOps(mipi, pi, "TFTF", mipi, mipi); + testIntervalOps(mipi, pi2, "FFFF", quad2, empty); + testIntervalOps(mipi, mipi, "TFTF", mipi, mipi); + testIntervalOps(mipi, mipi2, "FFFF", new S1Interval(-S2.M_PI, -S2.M_PI_2), empty); + testIntervalOps(mipi, quad12, "FFTF", quad12, mipi); + testIntervalOps(mipi, quad23, "FFTF", quad23, mipi); + + testIntervalOps(quad12, empty, "TTFF", quad12, empty); + testIntervalOps(quad12, full, "FFTT", full, quad12); + testIntervalOps(quad12, zero, "TFTF", quad12, zero); + testIntervalOps(quad12, pi, "TFTF", quad12, pi); + testIntervalOps(quad12, mipi, "TFTF", quad12, mipi); + testIntervalOps(quad12, quad12, "TFTT", quad12, quad12); + testIntervalOps(quad12, quad23, "FFTT", quad123, quad2); + testIntervalOps(quad12, quad34, "FFTF", full, quad12); + + testIntervalOps(quad23, empty, "TTFF", quad23, empty); + testIntervalOps(quad23, full, "FFTT", full, quad23); + testIntervalOps(quad23, zero, "FFFF", quad234, empty); + testIntervalOps(quad23, pi, "TTTT", quad23, pi); + 
testIntervalOps(quad23, mipi, "TTTT", quad23, mipi); + testIntervalOps(quad23, quad12, "FFTT", quad123, quad2); + testIntervalOps(quad23, quad23, "TFTT", quad23, quad23); + testIntervalOps(quad23, quad34, "FFTT", quad234, new S1Interval(-S2.M_PI, -S2.M_PI_2)); + + testIntervalOps(quad1, quad23, "FFTF", quad123, new S1Interval(S2.M_PI_2, S2.M_PI_2)); + testIntervalOps(quad2, quad3, "FFTF", quad23, mipi); + testIntervalOps(quad3, quad2, "FFTF", quad23, pi); + testIntervalOps(quad2, pi, "TFTF", quad2, pi); + testIntervalOps(quad2, mipi, "TFTF", quad2, mipi); + testIntervalOps(quad3, pi, "TFTF", quad3, pi); + testIntervalOps(quad3, mipi, "TFTF", quad3, mipi); + + S1Interval mid12 = new S1Interval(S2.M_PI_2 - 0.02, S2.M_PI_2 + 0.01); + S1Interval mid23 = new S1Interval(S2.M_PI - 0.01, -S2.M_PI + 0.02); + S1Interval mid34 = new S1Interval(-S2.M_PI_2 - 0.02, -S2.M_PI_2 + 0.01); + S1Interval mid41 = new S1Interval(-0.01, 0.02); + + S1Interval quad2hi = new S1Interval(mid23.lo(), quad12.hi()); + S1Interval quad1lo = new S1Interval(quad12.lo(), mid41.hi()); + S1Interval quad12eps = new S1Interval(quad12.lo(), mid23.hi()); + S1Interval quadeps12 = new S1Interval(mid41.lo(), quad12.hi()); + S1Interval quad123eps = new S1Interval(quad12.lo(), mid34.hi()); + testIntervalOps(quad12, mid12, "TTTT", quad12, mid12); + testIntervalOps(mid12, quad12, "FFTT", quad12, mid12); + testIntervalOps(quad12, mid23, "FFTT", quad12eps, quad2hi); + testIntervalOps(mid23, quad12, "FFTT", quad12eps, quad2hi); + testIntervalOps(quad12, mid34, "FFFF", quad123eps, empty); + testIntervalOps(mid34, quad12, "FFFF", quad123eps, empty); + testIntervalOps(quad12, mid41, "FFTT", quadeps12, quad1lo); + testIntervalOps(mid41, quad12, "FFTT", quadeps12, quad1lo); + + S1Interval quad2lo = new S1Interval(quad23.lo(), mid12.hi()); + S1Interval quad3hi = new S1Interval(mid34.lo(), quad23.hi()); + S1Interval quadeps23 = new S1Interval(mid12.lo(), quad23.hi()); + S1Interval quad23eps = new S1Interval(quad23.lo(), 
mid34.hi()); + S1Interval quadeps123 = new S1Interval(mid41.lo(), quad23.hi()); + testIntervalOps(quad23, mid12, "FFTT", quadeps23, quad2lo); + testIntervalOps(mid12, quad23, "FFTT", quadeps23, quad2lo); + testIntervalOps(quad23, mid23, "TTTT", quad23, mid23); + testIntervalOps(mid23, quad23, "FFTT", quad23, mid23); + testIntervalOps(quad23, mid34, "FFTT", quad23eps, quad3hi); + testIntervalOps(mid34, quad23, "FFTT", quad23eps, quad3hi); + testIntervalOps(quad23, mid41, "FFFF", quadeps123, empty); + testIntervalOps(mid41, quad23, "FFFF", quadeps123, empty); + + // AddPoint() + S1Interval r = S1Interval.empty(); + S1Interval res; + res = r.addPoint(0); + assertEquals(res, zero); + + res = r.addPoint(S2.M_PI); + assertEquals(res, pi); + + res = r.addPoint(-S2.M_PI); + assertEquals(res, mipi); + + res = r.addPoint(S2.M_PI); + res = res.addPoint(-S2.M_PI); + assertEquals(res, pi); + + res = res.addPoint(-S2.M_PI); + res.addPoint(S2.M_PI); + assertEquals(res, mipi); + + res = r.addPoint(mid12.lo()); + res = res.addPoint(mid12.hi()); + assertEquals(res, mid12); + + res = r.addPoint(mid23.lo()); + res = res.addPoint(mid23.hi()); + assertEquals(res, mid23); + + res = quad1.addPoint(-0.9 * S2.M_PI); + res = res.addPoint(-S2.M_PI_2); + assertEquals(res, quad123); + + r = S1Interval.full(); + res = r.addPoint(0); + assertTrue(res.isFull()); + + res = r.addPoint(S2.M_PI); + assertTrue(res.isFull()); + + res = r.addPoint(-S2.M_PI); + assertTrue(res.isFull()); + + // FromPointPair() + assertEquals(S1Interval.fromPointPair(-S2.M_PI, S2.M_PI), pi); + assertEquals(S1Interval.fromPointPair(S2.M_PI, -S2.M_PI), pi); + assertEquals(S1Interval.fromPointPair(mid34.hi(), mid34.lo()), mid34); + assertEquals(S1Interval.fromPointPair(mid23.lo(), mid23.hi()), mid23); + + // Expanded() + assertEquals(empty.expanded(1), empty); + assertEquals(full.expanded(1), full); + assertEquals(zero.expanded(1), new S1Interval(-1, 1)); + assertEquals(mipi.expanded(0.01), new S1Interval(S2.M_PI - 0.01, 
/*
 * Copyright 2005 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.common.geometry;

/**
 * Tests for {@link S2Cap}: validity/emptiness of the degenerate caps,
 * point and cap containment, complement behavior, lat/lng bounding
 * rectangles, and intersection/containment against S2 cells.
 */
public strictfp class S2CapTest extends GeometryTestCase {

  /** Returns the unit-sphere point at the given latitude/longitude in degrees. */
  public S2Point getLatLngPoint(double latDegrees, double lngDegrees) {
    return S2LatLng.fromDegrees(latDegrees, lngDegrees).toPoint();
  }

  // About 9 times the double-precision roundoff relative error.
  public static final double EPS = 1e-15;

  /** Exercises basic cap properties: empty/full, singletons, complements, containment. */
  public void testBasic() {
    // Test basic properties of empty and full caps.
    S2Cap empty = S2Cap.empty();
    S2Cap full = S2Cap.full();
    assertTrue(empty.isValid());
    assertTrue(empty.isEmpty());
    assertTrue(empty.complement().isFull());
    assertTrue(full.isValid());
    assertTrue(full.isFull());
    assertTrue(full.complement().isEmpty());
    assertEquals(full.height(), 2.0);
    assertDoubleNear(full.angle().degrees(), 180);

    // Containment and intersection of empty and full caps.
    assertTrue(empty.contains(empty));
    assertTrue(full.contains(empty));
    assertTrue(full.contains(full));
    assertTrue(!empty.interiorIntersects(empty));
    assertTrue(full.interiorIntersects(full));
    assertTrue(!full.interiorIntersects(empty));

    // Singleton cap containing the x-axis.
    S2Cap xaxis = S2Cap.fromAxisHeight(new S2Point(1, 0, 0), 0);
    assertTrue(xaxis.contains(new S2Point(1, 0, 0)));
    // A zero-height cap contains only its exact axis point.
    assertTrue(!xaxis.contains(new S2Point(1, 1e-20, 0)));
    assertEquals(xaxis.angle().radians(), 0.0);

    // Singleton cap containing the y-axis.
    S2Cap yaxis = S2Cap.fromAxisAngle(new S2Point(0, 1, 0), S1Angle.radians(0));
    assertTrue(!yaxis.contains(xaxis.axis()));
    assertEquals(xaxis.height(), 0.0);

    // Check that the complement of a singleton cap is the full cap.
    S2Cap xcomp = xaxis.complement();
    assertTrue(xcomp.isValid());
    assertTrue(xcomp.isFull());
    assertTrue(xcomp.contains(xaxis.axis()));

    // Check that the complement of the complement is *not* the original.
    // (The complement of a full cap is empty, not the original singleton.)
    assertTrue(xcomp.complement().isValid());
    assertTrue(xcomp.complement().isEmpty());
    assertTrue(!xcomp.complement().contains(xaxis.axis()));

    // Check that very small caps can be represented accurately.
    // Here "kTinyRad" is small enough that unit vectors perturbed by this
    // amount along a tangent do not need to be renormalized.
    final double kTinyRad = 1e-10;
    S2Cap tiny =
        S2Cap.fromAxisAngle(S2Point.normalize(new S2Point(1, 2, 3)), S1Angle.radians(kTinyRad));
    S2Point tangent = S2Point.normalize(S2Point.crossProd(tiny.axis(), new S2Point(3, 2, 1)));
    assertTrue(tiny.contains(S2Point.add(tiny.axis(), S2Point.mul(tangent, 0.99 * kTinyRad))));
    assertTrue(!tiny.contains(S2Point.add(tiny.axis(), S2Point.mul(tangent, 1.01 * kTinyRad))));

    // Basic tests on a hemispherical cap.
    S2Cap hemi = S2Cap.fromAxisHeight(S2Point.normalize(new S2Point(1, 0, 1)), 1);
    assertEquals(hemi.complement().axis(), S2Point.neg(hemi.axis()));
    assertEquals(hemi.complement().height(), 1.0);
    assertTrue(hemi.contains(new S2Point(1, 0, 0)));
    assertTrue(!hemi.complement().contains(new S2Point(1, 0, 0)));
    assertTrue(hemi.contains(S2Point.normalize(new S2Point(1, 0, -(1 - EPS)))));
    assertTrue(!hemi.interiorContains(S2Point.normalize(new S2Point(1, 0, -(1 + EPS)))));

    // A concave cap. (Angle > 90 degrees, so it covers more than a hemisphere.)
    S2Cap concave = S2Cap.fromAxisAngle(getLatLngPoint(80, 10), S1Angle.degrees(150));
    assertTrue(concave.contains(getLatLngPoint(-70 * (1 - EPS), 10)));
    assertTrue(!concave.contains(getLatLngPoint(-70 * (1 + EPS), 10)));
    assertTrue(concave.contains(getLatLngPoint(-50 * (1 - EPS), -170)));
    assertTrue(!concave.contains(getLatLngPoint(-50 * (1 + EPS), -170)));

    // Cap containment tests.
    assertTrue(!empty.contains(xaxis));
    assertTrue(!empty.interiorIntersects(xaxis));
    assertTrue(full.contains(xaxis));
    assertTrue(full.interiorIntersects(xaxis));
    assertTrue(!xaxis.contains(full));
    assertTrue(!xaxis.interiorIntersects(full));
    assertTrue(xaxis.contains(xaxis));
    // A singleton has no interior, so it cannot interior-intersect even itself.
    assertTrue(!xaxis.interiorIntersects(xaxis));
    assertTrue(xaxis.contains(empty));
    assertTrue(!xaxis.interiorIntersects(empty));
    assertTrue(hemi.contains(tiny));
    assertTrue(hemi.contains(
        S2Cap.fromAxisAngle(new S2Point(1, 0, 0), S1Angle.radians(S2.M_PI_4 - EPS))));
    assertTrue(!hemi.contains(
        S2Cap.fromAxisAngle(new S2Point(1, 0, 0), S1Angle.radians(S2.M_PI_4 + EPS))));
    assertTrue(concave.contains(hemi));
    assertTrue(concave.interiorIntersects(hemi.complement()));
    assertTrue(!concave.contains(S2Cap.fromAxisHeight(S2Point.neg(concave.axis()), 0.1)));
  }

  /** Verifies getRectBound() lat/lng rectangles for caps in various positions. */
  public void testRectBound() {
    // Empty and full caps.
    assertTrue(S2Cap.empty().getRectBound().isEmpty());
    assertTrue(S2Cap.full().getRectBound().isFull());

    final double kDegreeEps = 1e-13;
    // Maximum allowable error for latitudes and longitudes measured in
    // degrees. (assertDoubleNear uses a fixed tolerance that is too small.)

    // Cap that includes the south pole.
    S2LatLngRect rect =
        S2Cap.fromAxisAngle(getLatLngPoint(-45, 57), S1Angle.degrees(50)).getRectBound();
    assertDoubleNear(rect.latLo().degrees(), -90, kDegreeEps);
    assertDoubleNear(rect.latHi().degrees(), 5, kDegreeEps);
    // A cap containing a pole must span all longitudes.
    assertTrue(rect.lng().isFull());

    // Cap that is tangent to the north pole.
    rect = S2Cap.fromAxisAngle(S2Point.normalize(new S2Point(1, 0, 1)), S1Angle.radians(S2.M_PI_4))
        .getRectBound();
    assertDoubleNear(rect.lat().lo(), 0);
    assertDoubleNear(rect.lat().hi(), S2.M_PI_2);
    assertTrue(rect.lng().isFull());

    rect = S2Cap
        .fromAxisAngle(S2Point.normalize(new S2Point(1, 0, 1)), S1Angle.degrees(45)).getRectBound();
    assertDoubleNear(rect.latLo().degrees(), 0, kDegreeEps);
    assertDoubleNear(rect.latHi().degrees(), 90, kDegreeEps);
    assertTrue(rect.lng().isFull());

    // The eastern hemisphere.
    rect = S2Cap
        .fromAxisAngle(new S2Point(0, 1, 0), S1Angle.radians(S2.M_PI_2 + 5e-16)).getRectBound();
    assertDoubleNear(rect.latLo().degrees(), -90, kDegreeEps);
    assertDoubleNear(rect.latHi().degrees(), 90, kDegreeEps);
    assertTrue(rect.lng().isFull());

    // A cap centered on the equator.
    rect = S2Cap.fromAxisAngle(getLatLngPoint(0, 50), S1Angle.degrees(20)).getRectBound();
    assertDoubleNear(rect.latLo().degrees(), -20, kDegreeEps);
    assertDoubleNear(rect.latHi().degrees(), 20, kDegreeEps);
    assertDoubleNear(rect.lngLo().degrees(), 30, kDegreeEps);
    assertDoubleNear(rect.lngHi().degrees(), 70, kDegreeEps);

    // A cap centered on the north pole.
    rect = S2Cap.fromAxisAngle(getLatLngPoint(90, 123), S1Angle.degrees(10)).getRectBound();
    assertDoubleNear(rect.latLo().degrees(), 80, kDegreeEps);
    assertDoubleNear(rect.latHi().degrees(), 90, kDegreeEps);
    assertTrue(rect.lng().isFull());
  }

  /** Verifies cap/cell containment and intersection for all six cube faces. */
  public void testCells() {
    // For each cube face, we construct some cells on
    // that face and some caps whose positions are relative to that face,
    // and then check for the expected intersection/containment results.

    // The distance from the center of a face to one of its vertices.
    final double kFaceRadius = Math.atan(S2.M_SQRT2);

    for (int face = 0; face < 6; ++face) {
      // The cell consisting of the entire face.
      S2Cell rootCell = S2Cell.fromFacePosLevel(face, (byte) 0, 0);

      // A leaf cell at the midpoint of the v=1 edge.
      S2Cell edgeCell = new S2Cell(S2Projections.faceUvToXyz(face, 0, 1 - EPS));

      // A leaf cell at the u=1, v=1 corner.
      S2Cell cornerCell = new S2Cell(S2Projections.faceUvToXyz(face, 1 - EPS, 1 - EPS));

      // Quick check for full and empty caps.
      assertTrue(S2Cap.full().contains(rootCell));
      assertTrue(!S2Cap.empty().mayIntersect(rootCell));

      // Check intersections with the bounding caps of the leaf cells that are
      // adjacent to 'corner_cell' along the Hilbert curve. Because this corner
      // is at (u=1,v=1), the curve stays locally within the same cube face.
      S2CellId first = cornerCell.id().prev().prev().prev();
      S2CellId last = cornerCell.id().next().next().next().next();
      for (S2CellId id = first; id.lessThan(last); id = id.next()) {
        S2Cell cell = new S2Cell(id);
        assertEquals(cell.getCapBound().contains(cornerCell), id.equals(cornerCell.id()));
        assertEquals(
            cell.getCapBound().mayIntersect(cornerCell), id.parent().contains(cornerCell.id()));
      }

      int antiFace = (face + 3) % 6; // Opposite face.
      for (int capFace = 0; capFace < 6; ++capFace) {
        // A cap that barely contains all of 'cap_face'.
        S2Point center = S2Projections.getNorm(capFace);
        S2Cap covering = S2Cap.fromAxisAngle(center, S1Angle.radians(kFaceRadius + EPS));
        assertEquals(covering.contains(rootCell), capFace == face);
        assertEquals(covering.mayIntersect(rootCell), capFace != antiFace);
        assertEquals(covering.contains(edgeCell), center.dotProd(edgeCell.getCenter()) > 0.1);
        assertEquals(covering.contains(edgeCell), covering.mayIntersect(edgeCell));
        assertEquals(covering.contains(cornerCell), capFace == face);
        assertEquals(
            covering.mayIntersect(cornerCell), center.dotProd(cornerCell.getCenter()) > 0);

        // A cap that barely intersects the edges of 'cap_face'.
        S2Cap bulging = S2Cap.fromAxisAngle(center, S1Angle.radians(S2.M_PI_4 + EPS));
        assertTrue(!bulging.contains(rootCell));
        assertEquals(bulging.mayIntersect(rootCell), capFace != antiFace);
        assertEquals(bulging.contains(edgeCell), capFace == face);
        assertEquals(bulging.mayIntersect(edgeCell), center.dotProd(edgeCell.getCenter()) > 0.1);
        assertTrue(!bulging.contains(cornerCell));
        assertTrue(!bulging.mayIntersect(cornerCell));

        // A singleton cap.
        S2Cap singleton = S2Cap.fromAxisAngle(center, S1Angle.radians(0));
        assertEquals(singleton.mayIntersect(rootCell), capFace == face);
        assertTrue(!singleton.mayIntersect(edgeCell));
        assertTrue(!singleton.mayIntersect(cornerCell));
      }
    }
  }
}
/*
 * Copyright 2005 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.common.geometry;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.logging.Logger;

/**
 * Tests for {@link S2CellId}: accessors, parent/child relationships along the
 * Hilbert curve, token encoding round-trips, containment, continuity of the
 * curve on the sphere, point-coverage accuracy, and neighbor computations.
 */
public strictfp class S2CellIdTest extends GeometryTestCase {

  private static final Logger logger = Logger.getLogger(S2CellIdTest.class.getName());

  /** Returns the leaf cell id covering the given lat/lng (degrees), logging its hex id. */
  private S2CellId getCellId(double latDegrees, double lngDegrees) {
    S2CellId id = S2CellId.fromLatLng(S2LatLng.fromDegrees(latDegrees, lngDegrees));
    logger.info(Long.toString(id.id(), 16));
    return id;
  }

  /** Exercises constructors, accessors, face layout, and parent/child/wrap relationships. */
  public void testBasic() {
    logger.info("TestBasic");
    // Check default constructor.
    S2CellId id = new S2CellId();
    assertEquals(id.id(), 0);
    assertTrue(!id.isValid());

    // Check basic accessor methods.
    id = S2CellId.fromFacePosLevel(3, 0x12345678, S2CellId.MAX_LEVEL - 4);
    assertTrue(id.isValid());
    assertEquals(id.face(), 3);
    // assertEquals(id.pos(), 0x12345700);
    assertEquals(id.level(), S2CellId.MAX_LEVEL - 4);
    assertTrue(!id.isLeaf());

    // Check face definitions
    assertEquals(getCellId(0, 0).face(), 0);
    assertEquals(getCellId(0, 90).face(), 1);
    assertEquals(getCellId(90, 0).face(), 2);
    assertEquals(getCellId(0, 180).face(), 3);
    assertEquals(getCellId(0, -90).face(), 4);
    assertEquals(getCellId(-90, 0).face(), 5);

    // Check parent/child relationships.
    assertEquals(id.childBegin(id.level() + 2).pos(), 0x12345610);
    assertEquals(id.childBegin().pos(), 0x12345640);
    assertEquals(id.parent().pos(), 0x12345400);
    assertEquals(id.parent(id.level() - 2).pos(), 0x12345000);

    // Check ordering of children relative to parents.
    assertTrue(id.childBegin().lessThan(id));
    assertTrue(id.childEnd().greaterThan(id));
    assertEquals(id.childBegin().next().next().next().next(), id.childEnd());
    assertEquals(id.childBegin(S2CellId.MAX_LEVEL), id.rangeMin());
    assertEquals(id.childEnd(S2CellId.MAX_LEVEL), id.rangeMax().next());

    // Check wrapping from beginning of Hilbert curve to end and vice versa.
    assertEquals(S2CellId.begin(0).prevWrap(), S2CellId.end(0).prev());

    assertEquals(S2CellId.begin(S2CellId.MAX_LEVEL).prevWrap(),
        S2CellId.fromFacePosLevel(5, ~0L >>> S2CellId.FACE_BITS, S2CellId.MAX_LEVEL));

    assertEquals(S2CellId.end(4).prev().nextWrap(), S2CellId.begin(4));
    assertEquals(S2CellId.end(S2CellId.MAX_LEVEL).prev().nextWrap(),
        S2CellId.fromFacePosLevel(0, 0, S2CellId.MAX_LEVEL));

    // Check that cells are represented by the position of their center
    // along the Hilbert curve.
    assertEquals(id.rangeMin().id() + id.rangeMax().id(), 2 * id.id());
  }

  /** Round-trips random leaf cells through S2LatLng and back. */
  public void testInverses() {
    logger.info("TestInverses");
    // Check the conversion of random leaf cells to S2LatLngs and back.
    for (int i = 0; i < 200000; ++i) {
      S2CellId id = getRandomCellId(S2CellId.MAX_LEVEL);
      assertTrue(id.isLeaf() && id.level() == S2CellId.MAX_LEVEL);
      S2LatLng center = id.toLatLng();
      assertEquals(S2CellId.fromLatLng(center).id(), id.id());
    }
  }

  /** Pins the token encoding for two fixed ids (one with a negative raw long). */
  public void testToToken() {
    assertEquals("000000000000010a", new S2CellId(266).toToken());
    assertEquals("80855c", new S2CellId(-9185834709882503168L).toToken());
  }

  /** Round-trips random cell ids through the token representation. */
  public void testTokens() {
    logger.info("TestTokens");

    // Test random cell ids at all levels.
    for (int i = 0; i < 10000; ++i) {
      S2CellId id = getRandomCellId();
      if (!id.isValid()) {
        continue;
      }
      String token = id.toToken();
      assertTrue(token.length() <= 16);
      assertEquals(S2CellId.fromToken(token), id);
    }
    // Check that invalid cell ids can be encoded.
    String token = S2CellId.none().toToken();
    assertEquals(S2CellId.fromToken(token), S2CellId.none());
  }

  // Depth to which expandCell() recursively subdivides each face.
  private static final int kMaxExpandLevel = 3;

  /**
   * Recursively collects 'parent' and all its descendants down to kMaxExpandLevel
   * into 'cells', recording each child's parent in 'parentMap', while checking
   * child level/orientation invariants along the way.
   */
  private void expandCell(
      S2CellId parent, ArrayList<S2CellId> cells, Map<S2CellId, S2CellId> parentMap) {
    cells.add(parent);
    if (parent.level() == kMaxExpandLevel) {
      return;
    }
    MutableInteger i = new MutableInteger(0);
    MutableInteger j = new MutableInteger(0);
    MutableInteger orientation = new MutableInteger(0);
    int face = parent.toFaceIJOrientation(i, j, orientation);
    assertEquals(face, parent.face());

    int pos = 0;
    for (S2CellId child = parent.childBegin(); !child.equals(parent.childEnd());
        child = child.next()) {
      // Do some basic checks on the children
      assertEquals(child.level(), parent.level() + 1);
      assertTrue(!child.isLeaf());
      MutableInteger childOrientation = new MutableInteger(0);
      assertEquals(child.toFaceIJOrientation(i, j, childOrientation), face);
      // Child orientation is the parent's orientation xor'd with the
      // position-dependent Hilbert curve rotation.
      assertEquals(
          childOrientation.intValue(), orientation.intValue() ^ S2.POS_TO_ORIENTATION[pos]);

      parentMap.put(child, parent);
      expandCell(child, cells, parentMap);
      ++pos;
    }
  }

  /** Cross-checks contains()/intersects() against the explicit parent chain. */
  public void testContainment() {
    logger.info("TestContainment");
    Map<S2CellId, S2CellId> parentMap = new HashMap<S2CellId, S2CellId>();
    ArrayList<S2CellId> cells = new ArrayList<S2CellId>();
    for (int face = 0; face < 6; ++face) {
      expandCell(S2CellId.fromFacePosLevel(face, 0, 0), cells, parentMap);
    }
    for (int i = 0; i < cells.size(); ++i) {
      for (int j = 0; j < cells.size(); ++j) {
        boolean contained = true;
        // NOTE(review): this walk uses reference equality (id != cells.get(i)),
        // which only works because 'cells' and the values in 'parentMap' hold
        // the very same S2CellId instances created in expandCell(). Using
        // equals() would be more robust — confirm before changing.
        for (S2CellId id = cells.get(j); id != cells.get(i); id = parentMap.get(id)) {
          if (!parentMap.containsKey(id)) {
            contained = false;
            break;
          }
        }
        assertEquals(cells.get(i).contains(cells.get(j)), contained);
        assertEquals(cells.get(j).greaterOrEquals(cells.get(i).rangeMin())
            && cells.get(j).lessOrEquals(cells.get(i).rangeMax()), contained);
        assertEquals(cells.get(i).intersects(cells.get(j)),
            cells.get(i).contains(cells.get(j)) || cells.get(j).contains(cells.get(i)));
      }
    }
  }

  // Level at which testContinuity() walks the entire Hilbert curve.
  private static final int MAX_WALK_LEVEL = 8;

  /** Walks the full Hilbert curve at MAX_WALK_LEVEL checking adjacent-cell distances. */
  public void testContinuity() {
    logger.info("TestContinuity");
    // Make sure that sequentially increasing cell ids form a continuous
    // path over the surface of the sphere, i.e. there are no
    // discontinuous jumps from one region to another.

    double maxDist = S2Projections.MAX_EDGE.getValue(MAX_WALK_LEVEL);
    S2CellId end = S2CellId.end(MAX_WALK_LEVEL);
    S2CellId id = S2CellId.begin(MAX_WALK_LEVEL);
    for (; !id.equals(end); id = id.next()) {
      assertTrue(id.toPointRaw().angle(id.nextWrap().toPointRaw()) <= maxDist);

      // Check that the ToPointRaw() returns the center of each cell
      // in (s,t) coordinates.
      R2Vector uv = new R2Vector();
      S2Projections.xyzToFaceUV(id.toPointRaw(), uv);
      assertDoubleNear(
          Math.IEEEremainder(S2Projections.uvToST(uv.x), 1.0 / (1 << MAX_WALK_LEVEL)), 0);
      assertDoubleNear(
          Math.IEEEremainder(S2Projections.uvToST(uv.y), 1.0 / (1 << MAX_WALK_LEVEL)), 0);
    }
  }

  /** Checks that points round-trip through leaf cells within the worst-case distance. */
  public void testCoverage() {
    logger.info("TestCoverage");
    // Make sure that random points on the sphere can be represented to the
    // expected level of accuracy, which in the worst case is sqrt(2/3) times
    // the maximum arc length between the points on the sphere associated with
    // adjacent values of "i" or "j". (It is sqrt(2/3) rather than 1/2 because
    // the cells at the corners of each face are stretched -- they have 60 and
    // 120 degree angles.)

    double maxDist = 0.5 * S2Projections.MAX_DIAG.getValue(S2CellId.MAX_LEVEL);
    for (int i = 0; i < 1000000; ++i) {
      // NOTE(review): the random sampling below is commented out, so every one
      // of the 1,000,000 iterations re-tests the same hard-coded point. This
      // looks like leftover debugging; presumably 'p' was meant to come from
      // randomPoint() each iteration — confirm before re-enabling.
      // randomPoint();
      S2Point p = new S2Point(0.37861576725894824, 0.2772406863275093, 0.8830558887338725);
      S2Point q = S2CellId.fromPoint(p).toPointRaw();

      assertTrue(p.angle(q) <= maxDist);
    }
  }

  /**
   * Checks that getAllNeighbors(level) of 'id' plus id's own children at that
   * level equals the union of vertex neighbors of all of id's children.
   */
  public void testAllNeighbors(S2CellId id, int level) {
    assertTrue(level >= id.level() && level < S2CellId.MAX_LEVEL);

    // We compute GetAllNeighbors, and then add in all the children of "id"
    // at the given level. We then compare this against the result of finding
    // all the vertex neighbors of all the vertices of children of "id" at the
    // given level. These should give the same result.
    ArrayList<S2CellId> all = new ArrayList<S2CellId>();
    ArrayList<S2CellId> expected = new ArrayList<S2CellId>();
    id.getAllNeighbors(level, all);
    S2CellId end = id.childEnd(level + 1);
    for (S2CellId c = id.childBegin(level + 1); !c.equals(end); c = c.next()) {
      all.add(c.parent());
      c.getVertexNeighbors(level, expected);
    }
    // Sort the results and eliminate duplicates.
    Collections.sort(all);
    Collections.sort(expected);
    Set<S2CellId> allSet = new HashSet<S2CellId>(all);
    Set<S2CellId> expectedSet = new HashSet<S2CellId>(expected);
    assertTrue(allSet.equals(expectedSet));
  }

  /** Exercises edge neighbors, vertex neighbors, and getAllNeighbors consistency. */
  public void testNeighbors() {
    logger.info("TestNeighbors");

    // Check the edge neighbors of face 1.
    final int outFaces[] = {5, 3, 2, 0};
    S2CellId faceNbrs[] = new S2CellId[4];
    S2CellId.fromFacePosLevel(1, 0, 0).getEdgeNeighbors(faceNbrs);
    for (int i = 0; i < 4; ++i) {
      assertTrue(faceNbrs[i].isFace());
      assertEquals(faceNbrs[i].face(), outFaces[i]);
    }

    // Check the vertex neighbors of the center of face 2 at level 5.
    ArrayList<S2CellId> nbrs = new ArrayList<S2CellId>();
    S2CellId.fromPoint(new S2Point(0, 0, 1)).getVertexNeighbors(5, nbrs);
    Collections.sort(nbrs);
    for (int i = 0; i < 4; ++i) {
      assertEquals(nbrs.get(i), S2CellId.fromFaceIJ(
          2, (1 << 29) - (i < 2 ? 1 : 0), (1 << 29) - ((i == 0 || i == 3) ? 1 : 0)).parent(5));
    }
    nbrs.clear();

    // Check the vertex neighbors of the corner of faces 0, 4, and 5.
    S2CellId id = S2CellId.fromFacePosLevel(0, 0, S2CellId.MAX_LEVEL);
    id.getVertexNeighbors(0, nbrs);
    Collections.sort(nbrs);
    assertEquals(nbrs.size(), 3);
    assertEquals(nbrs.get(0), S2CellId.fromFacePosLevel(0, 0, 0));
    assertEquals(nbrs.get(1), S2CellId.fromFacePosLevel(4, 0, 0));
    assertEquals(nbrs.get(2), S2CellId.fromFacePosLevel(5, 0, 0));

    // Check that GetAllNeighbors produces results that are consistent
    // with GetVertexNeighbors for a bunch of random cells.
    for (int i = 0; i < 1000; ++i) {
      S2CellId id1 = getRandomCellId();
      if (id1.isLeaf()) {
        id1 = id1.parent();
      }

      // TestAllNeighbors computes approximately 2**(2*(diff+1)) cell id1s,
      // so it's not reasonable to use large values of "diff".
      int maxDiff = Math.min(6, S2CellId.MAX_LEVEL - id1.level() - 1);
      int level = id1.level() + random(maxDiff);
      testAllNeighbors(id1, level);
    }
  }
}
/*
 * Copyright 2005 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.common.geometry;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Tests for {@link S2Cell}: face-level geometry invariants, recursive
 * subdivision consistency, and validation of the S2 length/area metrics
 * against statistics gathered over sampled cells.
 */
public strictfp class S2CellTest extends GeometryTestCase {

  // When true, subdivision recursion is shallower/more frequent to keep
  // the test fast while still exercising the code.
  public static final boolean DEBUG_MODE = true;

  /** Checks edge/vertex multiplicity and orthogonality invariants of the 6 face cells. */
  public void testFaces() {
    Map<S2Point, Integer> edgeCounts = new HashMap<S2Point, Integer>();
    Map<S2Point, Integer> vertexCounts = new HashMap<S2Point, Integer>();
    for (int face = 0; face < 6; ++face) {
      S2CellId id = S2CellId.fromFacePosLevel(face, 0, 0);
      S2Cell cell = new S2Cell(id);
      assertEquals(cell.id(), id);
      assertEquals(cell.face(), face);
      assertEquals(cell.level(), 0);
      // Top-level faces have alternating orientations to get RHS coordinates.
      assertEquals(cell.orientation(), face & S2.SWAP_MASK);
      assertTrue(!cell.isLeaf());
      for (int k = 0; k < 4; ++k) {
        // Tally how many times each edge/vertex appears across all faces.
        if (edgeCounts.containsKey(cell.getEdgeRaw(k))) {
          edgeCounts.put(cell.getEdgeRaw(k), edgeCounts.get(cell
              .getEdgeRaw(k)) + 1);
        } else {
          edgeCounts.put(cell.getEdgeRaw(k), 1);
        }

        if (vertexCounts.containsKey(cell.getVertexRaw(k))) {
          vertexCounts.put(cell.getVertexRaw(k), vertexCounts.get(cell
              .getVertexRaw(k)) + 1);
        } else {
          vertexCounts.put(cell.getVertexRaw(k), 1);
        }
        // Edge normals are orthogonal to the edge's two endpoints.
        assertDoubleNear(cell.getVertexRaw(k).dotProd(cell.getEdgeRaw(k)), 0);
        assertDoubleNear(cell.getVertexRaw((k + 1) & 3).dotProd(
            cell.getEdgeRaw(k)), 0);
        assertDoubleNear(S2Point.normalize(
            S2Point.crossProd(cell.getVertexRaw(k), cell
                .getVertexRaw((k + 1) & 3))).dotProd(cell.getEdge(k)), 1.0);
      }
    }
    // Check that edges have multiplicity 2 and vertices have multiplicity 3.
    for (Integer i : edgeCounts.values()) {
      assertEquals(i.intValue(), 2);
    }
    for (Integer i : vertexCounts.values()) {
      assertEquals(i.intValue(), 3);
    }
  }

  /** Per-level accumulator for cell geometry statistics gathered by gatherStats(). */
  static class LevelStats {
    double count;
    double minArea, maxArea, avgArea;
    double minWidth, maxWidth, avgWidth;
    double minEdge, maxEdge, avgEdge, maxEdgeAspect;
    double minDiag, maxDiag, avgDiag, maxDiagAspect;
    double minAngleSpan, maxAngleSpan, avgAngleSpan;
    double minApproxRatio, maxApproxRatio;

    LevelStats() {
      // Minimums start high (100 >> any spherical measure) and maximums at 0
      // so the first sample initializes them.
      count = 0;
      minArea = 100;
      maxArea = 0;
      avgArea = 0;
      minWidth = 100;
      maxWidth = 0;
      avgWidth = 0;
      minEdge = 100;
      maxEdge = 0;
      avgEdge = 0;
      maxEdgeAspect = 0;
      minDiag = 100;
      maxDiag = 0;
      avgDiag = 0;
      maxDiagAspect = 0;
      minAngleSpan = 100;
      maxAngleSpan = 0;
      avgAngleSpan = 0;
      minApproxRatio = 100;
      maxApproxRatio = 0;
    }
  }

  // Shared across testSubdivide(S2Cell) invocations; indexed by cell level.
  // NOTE(review): static mutable state means the stats accumulate across all
  // tests in one JVM — fine for this single-test flow, but order-sensitive.
  static List<LevelStats> levelStats = new ArrayList<LevelStats>(
      S2CellId.MAX_LEVEL + 1);

  static {
    for (int i = 0; i < S2CellId.MAX_LEVEL + 1; ++i) {
      levelStats.add(new LevelStats());
    }
  }

  /** Folds one cell's edge/width/diagonal/area measurements into its level's stats. */
  static void gatherStats(S2Cell cell) {
    LevelStats s = levelStats.get(cell.level());
    double exactArea = cell.exactArea();
    double approxArea = cell.approxArea();
    double minEdge = 100, maxEdge = 0, avgEdge = 0;
    double minDiag = 100, maxDiag = 0;
    double minWidth = 100, maxWidth = 0;
    double minAngleSpan = 100, maxAngleSpan = 0;
    for (int i = 0; i < 4; ++i) {
      double edge = cell.getVertexRaw(i).angle(cell.getVertexRaw((i + 1) & 3));
      minEdge = Math.min(edge, minEdge);
      maxEdge = Math.max(edge, maxEdge);
      avgEdge += 0.25 * edge;
      // Width is measured from an edge midpoint to the opposite edge (i ^ 2).
      S2Point mid = S2Point.add(cell.getVertexRaw(i), cell
          .getVertexRaw((i + 1) & 3));
      double width = S2.M_PI_2 - mid.angle(cell.getEdgeRaw(i ^ 2));
      minWidth = Math.min(width, minWidth);
      maxWidth = Math.max(width, maxWidth);
      if (i < 2) {
        // Diagonals and angle spans only need to be sampled twice each.
        double diag = cell.getVertexRaw(i).angle(cell.getVertexRaw(i ^ 2));
        minDiag = Math.min(diag, minDiag);
        maxDiag = Math.max(diag, maxDiag);
        double angleSpan = cell.getEdgeRaw(i).angle(
            S2Point.neg(cell.getEdgeRaw(i ^ 2)));
        minAngleSpan = Math.min(angleSpan, minAngleSpan);
        maxAngleSpan = Math.max(angleSpan, maxAngleSpan);
      }
    }
    s.count += 1;
    s.minArea = Math.min(exactArea, s.minArea);
    s.maxArea = Math.max(exactArea, s.maxArea);
    s.avgArea += exactArea;
    s.minWidth = Math.min(minWidth, s.minWidth);
    s.maxWidth = Math.max(maxWidth, s.maxWidth);
    s.avgWidth += 0.5 * (minWidth + maxWidth);
    s.minEdge = Math.min(minEdge, s.minEdge);
    s.maxEdge = Math.max(maxEdge, s.maxEdge);
    s.avgEdge += avgEdge;
    s.maxEdgeAspect = Math.max(maxEdge / minEdge, s.maxEdgeAspect);
    s.minDiag = Math.min(minDiag, s.minDiag);
    s.maxDiag = Math.max(maxDiag, s.maxDiag);
    s.avgDiag += 0.5 * (minDiag + maxDiag);
    s.maxDiagAspect = Math.max(maxDiag / minDiag, s.maxDiagAspect);
    s.minAngleSpan = Math.min(minAngleSpan, s.minAngleSpan);
    s.maxAngleSpan = Math.max(maxAngleSpan, s.maxAngleSpan);
    s.avgAngleSpan += 0.5 * (minAngleSpan + maxAngleSpan);
    double approxRatio = approxArea / exactArea;
    s.minApproxRatio = Math.min(approxRatio, s.minApproxRatio);
    s.maxApproxRatio = Math.max(approxRatio, s.maxApproxRatio);
  }

  /**
   * Recursively subdivides 'cell' and checks that each child's geometry,
   * containment, and bounds are consistent with its parent and its cell id.
   * Also accumulates per-level statistics via gatherStats().
   */
  public void testSubdivide(S2Cell cell) {
    gatherStats(cell);
    if (cell.isLeaf()) {
      return;
    }

    S2Cell[] children = new S2Cell[4];
    for (int i = 0; i < children.length; ++i) {
      children[i] = new S2Cell();
    }
    assertTrue(cell.subdivide(children));
    S2CellId childId = cell.id().childBegin();
    double exactArea = 0;
    double approxArea = 0;
    double averageArea = 0;
    for (int i = 0; i < 4; ++i, childId = childId.next()) {
      exactArea += children[i].exactArea();
      approxArea += children[i].approxArea();
      averageArea += children[i].averageArea();

      // Check that the child geometry is consistent with its cell id.
      assertEquals(children[i].id(), childId);
      assertTrue(children[i].getCenter().aequal(childId.toPoint(), 1e-15));
      S2Cell direct = new S2Cell(childId);
      assertEquals(children[i].face(), direct.face());
      assertEquals(children[i].level(), direct.level());
      assertEquals(children[i].orientation(), direct.orientation());
      assertEquals(children[i].getCenterRaw(), direct.getCenterRaw());
      for (int k = 0; k < 4; ++k) {
        assertEquals(children[i].getVertexRaw(k), direct.getVertexRaw(k));
        assertEquals(children[i].getEdgeRaw(k), direct.getEdgeRaw(k));
      }

      // Test Contains() and MayIntersect().
      assertTrue(cell.contains(children[i]));
      assertTrue(cell.mayIntersect(children[i]));
      assertTrue(!children[i].contains(cell));
      assertTrue(cell.contains(children[i].getCenterRaw()));
      for (int j = 0; j < 4; ++j) {
        assertTrue(cell.contains(children[i].getVertexRaw(j)));
        if (j != i) {
          assertTrue(!children[i].contains(children[j].getCenterRaw()));
          assertTrue(!children[i].mayIntersect(children[j]));
        }
      }

      // Test GetCapBound and GetRectBound.
      S2Cap parentCap = cell.getCapBound();
      S2LatLngRect parentRect = cell.getRectBound();
      if (cell.contains(new S2Point(0, 0, 1))
          || cell.contains(new S2Point(0, 0, -1))) {
        // Cells containing a pole must span all longitudes.
        assertTrue(parentRect.lng().isFull());
      }
      S2Cap childCap = children[i].getCapBound();
      S2LatLngRect childRect = children[i].getRectBound();
      assertTrue(childCap.contains(children[i].getCenter()));
      assertTrue(childRect.contains(children[i].getCenterRaw()));
      assertTrue(parentCap.contains(children[i].getCenter()));
      assertTrue(parentRect.contains(children[i].getCenterRaw()));
      for (int j = 0; j < 4; ++j) {
        assertTrue(childCap.contains(children[i].getVertex(j)));
        assertTrue(childRect.contains(children[i].getVertex(j)));
        assertTrue(childRect.contains(children[i].getVertexRaw(j)));
        assertTrue(parentCap.contains(children[i].getVertex(j)));
        // The two diagnostic blocks below dump state before the assertion
        // that is about to fail; the redundant getRectBound() calls exist
        // only as a breakpoint/debugging hook.
        if (!parentRect.contains(children[i].getVertex(j))) {
          System.out.println("cell: " + cell + " i: " + i + " j: " + j);
          System.out.println("Children " + i + ": " + children[i]);
          System.out.println("Parent rect: " + parentRect);
          System.out.println("Vertex raw(j) " + children[i].getVertex(j));
          System.out.println("Latlng of vertex: " + new S2LatLng(children[i].getVertex(j)));
          cell.getRectBound();
        }
        assertTrue(parentRect.contains(children[i].getVertex(j)));
        if (!parentRect.contains(children[i].getVertexRaw(j))) {
          System.out.println("cell: " + cell + " i: " + i + " j: " + j);
          System.out.println("Children " + i + ": " + children[i]);
          System.out.println("Parent rect: " + parentRect);
          System.out.println("Vertex raw(j) " + children[i].getVertexRaw(j));
          System.out.println("Latlng of vertex: " + new S2LatLng(children[i].getVertexRaw(j)));
          cell.getRectBound();
        }
        assertTrue(parentRect.contains(children[i].getVertexRaw(j)));
        if (j != i) {
          // The bounding caps and rectangles should be tight enough so that
          // they exclude at least two vertices of each adjacent cell.
          int capCount = 0;
          int rectCount = 0;
          for (int k = 0; k < 4; ++k) {
            if (childCap.contains(children[j].getVertex(k))) {
              ++capCount;
            }
            if (childRect.contains(children[j].getVertexRaw(k))) {
              ++rectCount;
            }
          }
          assertTrue(capCount <= 2);
          if (childRect.latLo().radians() > -S2.M_PI_2
              && childRect.latHi().radians() < S2.M_PI_2) {
            // Bounding rectangles may be too large at the poles because the
            // pole itself has an arbitrary fixed longitude.
            assertTrue(rectCount <= 2);
          }
        }
      }

      // Check all children for the first few levels, and then sample randomly.
      // Also subdivide one corner cell, one edge cell, and one center cell
      // so that we have a better chance of sampling the minimum metric values.
      boolean forceSubdivide = false;
      S2Point center = S2Projections.getNorm(children[i].face());
      S2Point edge = S2Point.add(center, S2Projections.getUAxis(children[i].face()));
      S2Point corner = S2Point.add(edge, S2Projections.getVAxis(children[i].face()));
      for (int j = 0; j < 4; ++j) {
        S2Point p = children[i].getVertexRaw(j);
        if (p.equals(center) || p.equals(edge) || p.equals(corner)) {
          forceSubdivide = true;
        }
      }
      if (forceSubdivide || cell.level() < (DEBUG_MODE ? 5 : 6)
          || random(DEBUG_MODE ? 10 : 4) == 0) {
        testSubdivide(children[i]);
      }
    }

    // Check sum of child areas equals parent area.
    //
    // For ExactArea(), the best relative error we can expect is about 1e-6
    // because the precision of the unit vector coordinates is only about 1e-15
    // and the edge length of a leaf cell is about 1e-9.
    //
    // For ApproxArea(), the areas are accurate to within a few percent.
    //
    // For AverageArea(), the areas themselves are not very accurate, but
    // the average area of a parent is exactly 4 times the area of a child.

    assertTrue(Math.abs(Math.log(exactArea / cell.exactArea())) <= Math
        .abs(Math.log(1 + 1e-6)));
    assertTrue(Math.abs(Math.log(approxArea / cell.approxArea())) <= Math
        .abs(Math.log(1.03)));
    assertTrue(Math.abs(Math.log(averageArea / cell.averageArea())) <= Math
        .abs(Math.log(1 + 1e-15)));
  }

  /**
   * Checks that the observed min/max/avg of one sampled quantity at 'level'
   * is consistent with the corresponding S2 metric, within a tolerance that
   * models both sampling error and within-cell variation.
   */
  public void testMinMaxAvg(String label, int level, double count,
      double absError, double minValue, double maxValue, double avgValue,
      S2.Metric minMetric, S2.Metric maxMetric, S2.Metric avgMetric) {

    // All metrics are minimums, maximums, or averages of differential
    // quantities, and therefore will not be exact for cells at any finite
    // level. The differential minimum is always a lower bound, and the maximum
    // is always an upper bound, but these minimums and maximums may not be
    // achieved for two different reasons. First, the cells at each level are
    // sampled and we may miss the most extreme examples. Second, the actual
    // metric for a cell is obtained by integrating the differential quantity,
    // which is not constant across the cell. Therefore cells at low levels
    // (bigger cells) have smaller variations.
    //
    // The "tolerance" below is an attempt to model both of these effects.
    // At low levels, error is dominated by the variation of differential
    // quantities across the cells, while at high levels error is dominated by
    // the effects of random sampling.
    double tolerance = (maxMetric.getValue(level) - minMetric.getValue(level))
        / Math.sqrt(Math.min(count, 0.5 * (1L << level))) * 10;
    if (tolerance == 0) {
      tolerance = absError;
    }

    double minError = minValue - minMetric.getValue(level);
    double maxError = maxMetric.getValue(level) - maxValue;
    double avgError = Math.abs(avgMetric.getValue(level) - avgValue);
    System.out.printf(
        "%-10s (%6.0f samples, tolerance %8.3g) - min (%9.3g : %9.3g) "
            + "max (%9.3g : %9.3g), avg (%9.3g : %9.3g)\n", label, count,
        tolerance, minError / minValue, minError / tolerance, maxError
            / maxValue, maxError / tolerance, avgError / avgValue, avgError
            / tolerance);

    assertTrue(minMetric.getValue(level) <= minValue + absError);
    assertTrue(minMetric.getValue(level) >= minValue - tolerance);
    System.out.println("Level: " + maxMetric.getValue(level) + " max " + (maxValue + tolerance));
    assertTrue(maxMetric.getValue(level) <= maxValue + tolerance);
    assertTrue(maxMetric.getValue(level) >= maxValue - absError);
    assertDoubleNear(avgMetric.getValue(level), avgValue, 10 * tolerance);
  }

  /**
   * Entry point: subdivides all six faces (populating levelStats), prints a
   * per-level summary table, and validates the S2 length/area metrics.
   */
  public void testSubdivide() {
    for (int face = 0; face < 6; ++face) {
      testSubdivide(S2Cell.fromFacePosLevel(face, (byte) 0, 0));
    }

    // The maximum edge *ratio* is the ratio of the longest edge of any cell to
    // the shortest edge of any cell at the same level (and similarly for the
    // maximum diagonal ratio).
    //
    // The maximum edge *aspect* is the maximum ratio of the longest edge of a
    // cell to the shortest edge of that same cell (and similarly for the
    // maximum diagonal aspect).

    System.out
        .printf("Level Area Edge Diag Approx Average\n");
    System.out
        .printf(" Ratio Ratio Aspect Ratio Aspect Min Max Min Max\n");
    for (int i = 0; i <= S2CellId.MAX_LEVEL; ++i) {
      LevelStats s = levelStats.get(i);
      if (s.count > 0) {
        // Convert accumulated sums into averages.
        s.avgArea /= s.count;
        s.avgWidth /= s.count;
        s.avgEdge /= s.count;
        s.avgDiag /= s.count;
        s.avgAngleSpan /= s.count;
      }
      System.out.printf(
          "%5d %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f\n", i,
          s.maxArea / s.minArea, s.maxEdge / s.minEdge, s.maxEdgeAspect,
          s.maxDiag / s.minDiag, s.maxDiagAspect, s.minApproxRatio,
          s.maxApproxRatio, S2Cell.averageArea(i) / s.maxArea, S2Cell
              .averageArea(i)
              / s.minArea);
    }

    // Now check the validity of the S2 length and area metrics.
    for (int i = 0; i <= S2CellId.MAX_LEVEL; ++i) {
      LevelStats s = levelStats.get(i);
      if (s.count == 0) {
        continue;
      }

      System.out.printf(
          "Level %2d - metric (error/actual : error/tolerance)\n", i);

      // The various length calculations are only accurate to 1e-15 or so,
      // so we need to allow for this amount of discrepancy with the theoretical
      // minimums and maximums. The area calculation is accurate to about 1e-15
      // times the cell width.
      testMinMaxAvg("area", i, s.count, 1e-15 * s.minWidth, s.minArea,
          s.maxArea, s.avgArea, S2Projections.MIN_AREA, S2Projections.MAX_AREA,
          S2Projections.AVG_AREA);
      testMinMaxAvg("width", i, s.count, 1e-15, s.minWidth, s.maxWidth,
          s.avgWidth, S2Projections.MIN_WIDTH, S2Projections.MAX_WIDTH,
          S2Projections.AVG_WIDTH);
      testMinMaxAvg("edge", i, s.count, 1e-15, s.minEdge, s.maxEdge,
          s.avgEdge, S2Projections.MIN_EDGE, S2Projections.MAX_EDGE,
          S2Projections.AVG_EDGE);
      testMinMaxAvg("diagonal", i, s.count, 1e-15, s.minDiag, s.maxDiag,
          s.avgDiag, S2Projections.MIN_DIAG, S2Projections.MAX_DIAG,
          S2Projections.AVG_DIAG);
      testMinMaxAvg("angle span", i, s.count, 1e-15, s.minAngleSpan,
          s.maxAngleSpan, s.avgAngleSpan, S2Projections.MIN_ANGLE_SPAN,
          S2Projections.MAX_ANGLE_SPAN, S2Projections.AVG_ANGLE_SPAN);

      // The aspect ratio calculations are ratios of lengths and are therefore
      // less accurate at higher subdivision levels.
      assertTrue(s.maxEdgeAspect <= S2Projections.MAX_EDGE_ASPECT + 1e-15
          * (1 << i));
      assertTrue(s.maxDiagAspect <= S2Projections.MAX_DIAG_ASPECT + 1e-15
          * (1 << i));
    }
  }

  // Recursion depth bound for the expandChildren benchmarks below.
  static final int MAX_LEVEL = DEBUG_MODE ? 6 : 10;

  /** Recursively expands 'cell' via subdivide() down to MAX_LEVEL (benchmark helper). */
  public void expandChildren1(S2Cell cell) {
    S2Cell[] children = new S2Cell[4];
    assertTrue(cell.subdivide(children));
    if (children[0].level() < MAX_LEVEL) {
      for (int pos = 0; pos < 4; ++pos) {
        expandChildren1(children[pos]);
      }
    }
  }

  /** Same expansion via per-child S2Cell construction from cell ids (benchmark helper). */
  public void expandChildren2(S2Cell cell) {
    S2CellId id = cell.id().childBegin();
    for (int pos = 0; pos < 4; ++pos, id = id.next()) {
      S2Cell child = new S2Cell(id);
      if (child.level() < MAX_LEVEL) {
        expandChildren2(child);
      }
    }
  }
}
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.common.geometry; + +import com.google.common.collect.Lists; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.logging.Logger; + +public strictfp class S2CellUnionTest extends GeometryTestCase { + public static Logger logger = Logger.getLogger(S2CellUnionTest.class.getName()); + + public void testBasic() { + logger.info("TestBasic"); + + S2CellUnion empty = new S2CellUnion(); + ArrayList<S2CellId> ids = Lists.newArrayList(); + empty.initFromCellIds(ids); + assertEquals(0, empty.size()); + + S2CellId face1Id = S2CellId.fromFacePosLevel(1, 0, 0); + S2CellUnion face1Union = new S2CellUnion(); + ids.add(face1Id); + face1Union.initFromCellIds(ids); + assertEquals(1, face1Union.size()); + assertEquals(face1Id, face1Union.cellId(0)); + + S2CellId face2Id = S2CellId.fromFacePosLevel(2, 0, 0); + S2CellUnion face2Union = new S2CellUnion(); + ArrayList<Long> cellids = Lists.newArrayList(); + cellids.add(face2Id.id()); + face2Union.initFromIds(cellids); + assertEquals(1, face2Union.size()); + assertEquals(face2Id, face2Union.cellId(0)); + + S2Cell face1Cell = new S2Cell(face1Id); + S2Cell face2Cell = new S2Cell(face2Id); + assertTrue(face1Union.contains(face1Cell)); + assertTrue(!face1Union.contains(face2Cell)); + } + + public void testContainsCellUnion() { + logger.info("TestContainsCellUnion"); + + Set<S2CellId> randomCells 
= new HashSet<S2CellId>(); + for (int i = 0; i < 100; i++) { + randomCells.add(getRandomCellId(S2CellId.MAX_LEVEL)); + } + + S2CellUnion union = new S2CellUnion(); + union.initFromCellIds(Lists.newArrayList(randomCells)); + + // Add one more + while (!randomCells.add(getRandomCellId(S2CellId.MAX_LEVEL))) { + } + + S2CellUnion unionPlusOne = new S2CellUnion(); + unionPlusOne.initFromCellIds(Lists.newArrayList(randomCells)); + + assertTrue(unionPlusOne.contains(union)); + assertFalse(union.contains(unionPlusOne)); + + // Build the set of parent cells and check containment + Set<S2CellId> parents = new HashSet<S2CellId>(); + for (S2CellId cellId : union) { + parents.add(cellId.parent()); + } + + S2CellUnion parentUnion = new S2CellUnion(); + parentUnion.initFromCellIds(Lists.newArrayList(parents)); + + assertTrue(parentUnion.contains(union)); + assertFalse(union.contains(parentUnion)); + } + + private void addCells(S2CellId id, boolean selected, List<S2CellId> input, + ArrayList<S2CellId> expected) { + // Decides whether to add "id" and/or some of its descendants to the + // test case. If "selected" is true, then the region covered by "id" + // *must* be added to the test case (either by adding "id" itself, or + // some combination of its descendants, or both). If cell ids are to + // the test case "input", then the corresponding expected result after + // simplification is added to "expected". + + if (id.equals(S2CellId.none())) { + // Initial call: decide whether to add cell(s) from each face. + for (int face = 0; face < 6; ++face) { + addCells(S2CellId.fromFacePosLevel(face, 0, 0), false, input, expected); + } + return; + } + if (id.isLeaf()) { + // The rnd.OneIn() call below ensures that the parent of a leaf cell + // will always be selected (if we make it that far down the hierarchy). + assertTrue(selected); + input.add(id); + return; + } + // The following code ensures that the probability of selecting a cell + // at each level is approximately the same, i.e. 
we test normalization + // of cells at all levels. + if (!selected && random(S2CellId.MAX_LEVEL - id.level()) != 0) { + // Once a cell has been selected, the expected output is predetermined. + // We then make sure that cells are selected that will normalize to + // the desired output. + expected.add(id); + selected = true; + } + + // With the rnd.OneIn() constants below, this function adds an average + // of 5/6 * (kMaxLevel - level) cells to "input" where "level" is the + // level at which the cell was first selected (level 15 on average). + // Therefore the average number of input cells in a test case is about + // (5/6 * 15 * 6) = 75. The average number of output cells is about 6. + + // If a cell is selected, we add it to "input" with probability 5/6. + boolean added = false; + if (selected && random(6) != 0) { + input.add(id); + added = true; + } + int numChildren = 0; + S2CellId child = id.childBegin(); + for (int pos = 0; pos < 4; ++pos, child = child.next()) { + // If the cell is selected, on average we recurse on 4/12 = 1/3 child. + // This intentionally may result in a cell and some of its children + // being included in the test case. + // + // If the cell is not selected, on average we recurse on one child. + // We also make sure that we do not recurse on all 4 children, since + // then we might include all 4 children in the input case by accident + // (in which case the expected output would not be correct). + if (random(selected ? 12 : 4) == 0 && numChildren < 3) { + addCells(child, selected, input, expected); + ++numChildren; + } + // If this cell was selected but the cell itself was not added, we + // must ensure that all 4 children (or some combination of their + // descendents) are added. 
+ if (selected && !added) { + addCells(child, selected, input, expected); + } + } + } + + public void testNormalize() { + logger.info("TestNormalize"); + + // Try a bunch of random test cases, and keep track of average + // statistics for normalization (to see if they agree with the + // analysis above). + S2CellUnion cellunion = new S2CellUnion(); + double inSum = 0, outSum = 0; + final int kIters = 2000; + for (int i = 0; i < kIters; ++i) { + ArrayList<S2CellId> input = Lists.newArrayList(); + ArrayList<S2CellId> expected = Lists.newArrayList(); + addCells(S2CellId.none(), false, input, expected); + inSum += input.size(); + outSum += expected.size(); + cellunion.initFromCellIds(input); + assertEquals(cellunion.size(), expected.size()); + + assertEquals(expected, cellunion.cellIds()); + + // Test GetCapBound(). + S2Cap cap = cellunion.getCapBound(); + for (int k = 0; k < cellunion.size(); ++k) { + assertTrue(cap.contains(new S2Cell(cellunion.cellId(k)))); + } + + // Test Contains(S2CellId) and Intersects(S2CellId). 
+ for (int j = 0; j < input.size(); ++j) { + assertTrue(cellunion.contains(input.get(j))); + assertTrue(cellunion.intersects(input.get(j))); + if (!input.get(j).isFace()) { + assertTrue(cellunion.intersects(input.get(j).parent())); + if (input.get(j).level() > 1) { + assertTrue(cellunion.intersects(input.get(j).parent().parent())); + assertTrue(cellunion.intersects(input.get(j).parent(0))); + } + } + if (!input.get(j).isLeaf()) { + assertTrue(cellunion.contains(input.get(j).childBegin())); + assertTrue(cellunion.intersects(input.get(j).childBegin())); + assertTrue(cellunion.contains(input.get(j).childEnd().prev())); + assertTrue(cellunion.intersects(input.get(j).childEnd().prev())); + assertTrue(cellunion.contains(input.get(j).childBegin(S2CellId.MAX_LEVEL))); + assertTrue(cellunion.intersects(input.get(j).childBegin(S2CellId.MAX_LEVEL))); + } + } + for (int j = 0; j < expected.size(); ++j) { + if (!expected.get(j).isFace()) { + assertTrue(!cellunion.contains(expected.get(j).parent())); + assertTrue(!cellunion.contains(expected.get(j).parent(0))); + } + } + + // Test contains(S2CellUnion) and intersects(S2CellUnion) + ArrayList<S2CellId> x = Lists.newArrayList(); + ArrayList<S2CellId> y = Lists.newArrayList(); + ArrayList<S2CellId> xOrY = Lists.newArrayList(); + ArrayList<S2CellId> xAndY = Lists.newArrayList(); + for (int j = 0; j < input.size(); ++j) { + boolean inX = random(2) == 0; + boolean inY = random(2) == 0; + if (inX) { + x.add(input.get(j)); + } + if (inY) { + y.add(input.get(j)); + } + if (inX || inY) { + xOrY.add(input.get(j)); + } + } + S2CellUnion xCells = new S2CellUnion(); + S2CellUnion yCells = new S2CellUnion(); + S2CellUnion xOrYExpected = new S2CellUnion(); + S2CellUnion xAndYExpected = new S2CellUnion(); + xCells.initFromCellIds(x); + yCells.initFromCellIds(y); + xOrYExpected.initFromCellIds(xOrY); + + S2CellUnion xOrYCells = new S2CellUnion(); + xOrYCells.getUnion(xCells, yCells); + assertEquals(xOrYExpected, xOrYCells); + + // Compute the 
intersection of "x" with each cell of "y", + // check that this intersection is correct, and append the + // results to xAndYExpected. + for (int j = 0; j < yCells.size(); ++j) { + S2CellId yId = yCells.cellId(j); + S2CellUnion u = new S2CellUnion(); + u.getIntersection(xCells, yId); + for (int k = 0; k < xCells.size(); ++k) { + S2CellId xId = xCells.cellId(k); + if (xId.contains(yId)) { + assertEquals(1, u.size()); + assertEquals(yId, u.cellId(0)); + } else if (yId.contains(xId)) { + if (!u.contains(xId)) { + u.getIntersection(xCells, yId); + } + assertTrue(u.contains(xId)); + } + } + for (int k = 0; k < u.size(); ++k) { + assertTrue(xCells.contains(u.cellId(k))); + assertTrue(yId.contains(u.cellId(k))); + } + xAndY.addAll(u.cellIds()); + } + xAndYExpected.initFromCellIds(xAndY); + + S2CellUnion xAndYCells = new S2CellUnion(); + xAndYCells.getIntersection(xCells, yCells); + assertEquals(xAndYExpected, xAndYCells); + + ArrayList<S2CellId> test = Lists.newArrayList(); + ArrayList<S2CellId> dummy = Lists.newArrayList(); + + addCells(S2CellId.none(), false, test, dummy); + for (int j = 0; j < test.size(); ++j) { + boolean contains = false, intersects = false; + for (int k = 0; k < expected.size(); ++k) { + if (expected.get(k).contains(test.get(j))) { + contains = true; + } + if (expected.get(k).intersects(test.get(j))) { + intersects = true; + } + } + assertEquals(cellunion.contains(test.get(j)), contains); + assertEquals(cellunion.intersects(test.get(j)), intersects); + } + + } + } + + double getMaxAngle(S2CellUnion covering, S2Point axis) { + double maxAngle = 0; + for (int i = 0; i < covering.size(); ++i) { + S2Cell cell = new S2Cell(covering.cellId(i)); + S2Cap cellCap = cell.getCapBound(); + double angle = axis.angle(cellCap.axis()) + cellCap.angle().radians(); + maxAngle = Math.max(maxAngle, angle); + } + return maxAngle; + } + + public void testExpand() { + logger.info("TestExpand"); + + // This test generates coverings for caps of random sizes, and expands + 
// the coverings by a random radius, and then make sure that the new + // covering covers the expanded cap. It also makes sure that the + // new covering is not too much larger than expected. + + S2RegionCoverer coverer = new S2RegionCoverer(); + for (int i = 0; i < 1000; ++i) { + S2Cap cap = getRandomCap(S2Cell.averageArea(S2CellId.MAX_LEVEL), 4 * S2.M_PI); + + // Expand the cap by a random factor whose log is uniformly distributed + // between 0 and log(1e2). + S2Cap expandedCap = + S2Cap.fromAxisHeight(cap.axis(), Math.min(2.0, Math.pow(1e2, rand.nextDouble()) + * cap.height())); + + double radius = expandedCap.angle().radians() - cap.angle().radians(); + int maxLevelDiff = random(8); + + S2CellUnion covering = new S2CellUnion(); + coverer.setMaxCells(1 + skewed(10)); + coverer.getCovering(cap, covering); + checkCovering(cap, covering, true, new S2CellId()); + + double maxAngle = getMaxAngle(covering, cap.axis()); + int minLevel = S2CellId.MAX_LEVEL; + for (int j = 0; j < covering.size(); ++j) { + minLevel = Math.min(minLevel, covering.cellId(j).level()); + } + covering.expand(S1Angle.radians(radius), maxLevelDiff); + checkCovering(expandedCap, covering, false, new S2CellId()); + + int expandLevel = + Math.min(minLevel + maxLevelDiff, S2Projections.MIN_WIDTH.getMaxLevel(radius)); + double expandedMaxAngle = getMaxAngle(covering, cap.axis()); + + // If the covering includes a tiny cell along the boundary, in theory the + // maximum angle of the covering from the cap axis can increase by up to + // twice the maximum length of a cell diagonal. We allow for an increase + // of slightly more than this because cell bounding caps are not exact. 
+ assertTrue(expandedMaxAngle - maxAngle <= 2.01 * S2Projections.MAX_DIAG + .getValue(expandLevel)); + } + } + + public void testLeafCellsCovered() { + S2CellUnion cellUnion = new S2CellUnion(); + + // empty union + assertEquals(0, cellUnion.leafCellsCovered()); + + ArrayList<S2CellId> ids = Lists.newArrayList(); + ids.add(S2CellId.fromFacePosLevel( + 0, (1L << ((S2CellId.MAX_LEVEL << 1) - 1)), S2CellId.MAX_LEVEL)); + + // One leaf on face 0. + cellUnion.initFromCellIds(ids); + assertEquals(1L, cellUnion.leafCellsCovered()); + + // Face 0. + ids.add(S2CellId.fromFacePosLevel(0, 0, 0)); + cellUnion.initFromCellIds(ids); + assertEquals(1L << 60, cellUnion.leafCellsCovered()); + + // Five faces. + cellUnion.expand(0); + assertEquals(5L << 60, cellUnion.leafCellsCovered()); + + // Whole world. + cellUnion.expand(0); + assertEquals(6L << 60, cellUnion.leafCellsCovered()); + + // Add some disjoint cells. + ids.add(S2CellId.fromFacePosLevel(1, 0, 1)); + ids.add(S2CellId.fromFacePosLevel(2, 0, 2)); + ids.add(S2CellId.fromFacePosLevel(2, (1L << 60), 2)); + ids.add(S2CellId.fromFacePosLevel(3, 0, 14)); + ids.add(S2CellId.fromFacePosLevel(4, (1L << 60), 15)); + ids.add(S2CellId.fromFacePosLevel(4, 0, 27)); + ids.add(S2CellId.fromFacePosLevel(5, 0, 30)); + cellUnion.initFromCellIds(ids); + long expected = 1L + (1L << 6) + (1L << 30) + (1L << 32) + (2L << 56) + (1L << 58) + (1L << 60); + assertEquals(expected, cellUnion.leafCellsCovered()); + } + + + public void testAverageBasedArea() { + S2CellUnion cellUnion = new S2CellUnion(); + + // empty union + assertEquals(0.0, cellUnion.averageBasedArea()); + + ArrayList<S2CellId> ids = Lists.newArrayList(); + ids.add(S2CellId.fromFacePosLevel(1, 0, 1)); + ids.add(S2CellId.fromFacePosLevel(5, 0, 30)); + cellUnion.initFromCellIds(ids); + + double expected = S2Cell.averageArea(S2CellId.MAX_LEVEL) * (1L + (1L << 58)); + assertEquals(expected, cellUnion.averageBasedArea()); + } + + public void testApproxArea() { + S2CellUnion cellUnion = 
new S2CellUnion(); + + // empty union + assertEquals(0.0, cellUnion.approxArea()); + + ArrayList<S2CellId> ids = Lists.newArrayList(); + ids.add(S2CellId.fromFacePosLevel(1, 0, 1)); + ids.add(S2CellId.fromFacePosLevel(5, 0, 30)); + cellUnion.initFromCellIds(ids); + + double expected = new S2Cell(ids.get(0)).approxArea() + new S2Cell(ids.get(1)).approxArea(); + assertEquals(expected, cellUnion.approxArea()); + } + + public void testExactArea() { + S2CellUnion cellUnion = new S2CellUnion(); + + // empty union + assertEquals(0.0, cellUnion.exactArea()); + + ArrayList<S2CellId> ids = Lists.newArrayList(); + ids.add(S2CellId.fromFacePosLevel(1, 0, 1)); + ids.add(S2CellId.fromFacePosLevel(5, 0, 30)); + cellUnion.initFromCellIds(ids); + + double expected = new S2Cell(ids.get(0)).exactArea() + new S2Cell(ids.get(1)).exactArea(); + assertEquals(expected, cellUnion.averageBasedArea()); + } +} diff --git a/tests/com/google/common/geometry/S2EdgeIndexTest.java b/tests/com/google/common/geometry/S2EdgeIndexTest.java new file mode 100644 index 0000000..6928171 --- /dev/null +++ b/tests/com/google/common/geometry/S2EdgeIndexTest.java @@ -0,0 +1,190 @@ +/* + * Copyright 2011 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.common.geometry; + +import com.google.common.collect.Lists; +import com.google.common.collect.Sets; + +import java.util.HashSet; +import java.util.List; +import java.util.logging.Logger; + +/** + * Tests for {@link S2EdgeIndex}. + * + * @author andriy@google.com (Andriy Bihun) ported from util/geometry + * @author pilloff@google.com (Mark Pilloff) original author + */ +public strictfp class S2EdgeIndexTest extends GeometryTestCase { + private static final Logger log = Logger.getLogger(S2EdgeIndexTest.class.getCanonicalName()); + + public static class EdgeVectorIndex extends S2EdgeIndex { + private List<S2Edge> edges; + + public EdgeVectorIndex(List<S2Edge> edges) { + this.edges = edges; + } + + @Override + protected int getNumEdges() { + return edges.size(); + } + + @Override + protected S2Point edgeFrom(int index) { + return edges.get(index).getStart(); + } + + @Override + protected S2Point edgeTo(int index) { + return edges.get(index).getEnd(); + } + } + + /** + * Generates a random edge whose center is in the given cap. + */ + private S2Edge randomEdgeCrossingCap(double maxLengthMeters, S2Cap cap) { + // Pick the edge center at random. + S2Point edgeCenter = samplePoint(cap); + // Pick two random points in a suitably sized cap about the edge center. + S2Cap edgeCap = S2Cap.fromAxisAngle( + edgeCenter, S1Angle.radians(maxLengthMeters / S2LatLng.EARTH_RADIUS_METERS / 2)); + S2Point p1 = samplePoint(edgeCap); + S2Point p2 = samplePoint(edgeCap); + return new S2Edge(p1, p2); + } + + /* + * Generates "numEdges" random edges, of length at most "edgeLengthMetersMax" + * and each of whose center is in a randomly located cap with radius + * "capSpanMeters", and puts results into "edges". 
+ */ + private void generateRandomEarthEdges( + double edgeLengthMetersMax, double capSpanMeters, int numEdges, List<S2Edge> edges) { + S2Cap cap = S2Cap.fromAxisAngle( + randomPoint(), S1Angle.radians(capSpanMeters / S2LatLng.EARTH_RADIUS_METERS)); + for (int i = 0; i < numEdges; ++i) { + edges.add(randomEdgeCrossingCap(edgeLengthMetersMax, cap)); + } + } + + private void checkAllCrossings( + List<S2Edge> allEdges, int minCrossings, int maxChecksCrossingsRatio) { + EdgeVectorIndex index = new EdgeVectorIndex(allEdges); + index.computeIndex(); + EdgeVectorIndex.DataEdgeIterator it = new EdgeVectorIndex.DataEdgeIterator(index); + double totalCrossings = 0; + double totalIndexChecks = 0; + + for (int in = 0; in < allEdges.size(); ++in) { + S2Edge e = allEdges.get(in); + + HashSet<Integer> candidateSet = Sets.newHashSet(); + + StringBuilder sb = new StringBuilder(); + for (it.getCandidates(e.getStart(), e.getEnd()); it.hasNext(); it.next()) { + candidateSet.add(it.index()); + sb.append(it.index()).append("/"); + ++totalIndexChecks; + } + + for (int i = 0; i < allEdges.size(); ++i) { + int crossing = S2EdgeUtil.robustCrossing( + e.getStart(), e.getEnd(), allEdges.get(i).getStart(), allEdges.get(i).getEnd()); + if (crossing >= 0) { + StringBuilder sbError = new StringBuilder(); + sbError + .append("\n==CHECK_ERROR===================================\n") + .append("CandidateSet: ") + .append(sb) + .append("\nin=") + .append(in) + .append(" i=") + .append(i) + .append(" robustCrossing=") + .append(crossing) + .append("\nfrom:\n") + .append(e) + .append("\nto:\n") + .append(allEdges.get(i)) + .append("\n=================================================="); + assertTrue(sbError.toString(), candidateSet.contains(i)); + ++totalCrossings; + } + } + } + + log.info( + "Pairs/num crossings/check crossing ratio: " + + Integer.toString(allEdges.size() * allEdges.size()) + "/" + + Double.toString(totalCrossings) + "/" + + Double.toString(totalIndexChecks / totalCrossings)); + 
assertTrue(minCrossings <= totalCrossings); + assertTrue(totalCrossings * maxChecksCrossingsRatio >= totalIndexChecks); + } + + /* + * Generates random edges and tests, for each edge, that all those that cross + * are candidates. + */ + private void tryCrossingsRandomInCap(int numEdges, double edgeLengthMax, double capSpanMeters, + int minCrossings, int maxChecksCrossingsRatio) { + List<S2Edge> allEdges = Lists.newArrayList(); + generateRandomEarthEdges(edgeLengthMax, capSpanMeters, numEdges, allEdges); + checkAllCrossings(allEdges, minCrossings, maxChecksCrossingsRatio); + } + + public void testSpecificEdges() { + List<S2Point> ps = Lists.newArrayList(); + ps.add(new S2Point(0.8088625416501157, -0.40633615485481134, 0.4250086092929434)); + ps.add(new S2Point(0.8088939911085784, -0.40631384442755236, 0.4249700824469155)); + ps.add(new S2Point(0.8088088971141814, -0.40642839367135375, 0.425022503835579)); + ps.add(new S2Point(0.8088643962606756, -0.406333410696549, 0.4250077032402616)); + List<S2Edge> allEdges = Lists.newArrayList(); + allEdges.add(new S2Edge(ps.get(0), ps.get(1))); + allEdges.add(new S2Edge(ps.get(2), ps.get(3))); + checkAllCrossings(allEdges, 0, 16); + } + + public void testLoopCandidateOfItself() { + List<S2Point> ps = Lists.newArrayList(); // A diamond loop around 0,180. 
+ ps.add(makePoint("0:178")); + ps.add(makePoint("-1:180")); + ps.add(makePoint("0:-179")); + ps.add(makePoint("1:-180")); + List<S2Edge> allEdges = Lists.newArrayList(); + for (int i = 0; i < 4; ++i) { + allEdges.add(new S2Edge(ps.get(i), ps.get((i + 1) % 4))); + } + checkAllCrossings(allEdges, 0, 16); + } + + public void testRandomEdgeCrossings() { + tryCrossingsRandomInCap(2000, 30, 5000, 500, 2); + tryCrossingsRandomInCap(1000, 100, 5000, 500, 3); + tryCrossingsRandomInCap(1000, 1000, 5000, 1000, 40); + tryCrossingsRandomInCap(500, 5000, 5000, 5000, 20); + } + + public void testRandomEdgeCrossingsSparse() { + for (int i = 0; i < 5; ++i) { + tryCrossingsRandomInCap(2000, 100, 5000, 500, 8); + tryCrossingsRandomInCap(2000, 300, 50000, 1000, 10); + } + } +} diff --git a/tests/com/google/common/geometry/S2EdgeUtilTest.java b/tests/com/google/common/geometry/S2EdgeUtilTest.java new file mode 100644 index 0000000..57ea7da --- /dev/null +++ b/tests/com/google/common/geometry/S2EdgeUtilTest.java @@ -0,0 +1,478 @@ +/* + * Copyright 2006 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.common.geometry; + +import com.google.common.collect.ImmutableList; + +/** + * Tests for {@link S2EdgeUtil}. 
+ * + */ +public strictfp class S2EdgeUtilTest extends GeometryTestCase { + + public static final int DEGENERATE = -2; + + private void compareResult(int actual, int expected) { + // HACK ALERT: RobustCrossing() is allowed to return 0 or -1 if either edge + // is degenerate. We use the value kDegen to represent this possibility. + if (expected == DEGENERATE) { + assertTrue(actual <= 0); + } else { + assertEquals(expected, actual); + } + } + + private void assertCrossing(S2Point a, + S2Point b, + S2Point c, + S2Point d, + int robust, + boolean edgeOrVertex, + boolean simple) { + a = S2Point.normalize(a); + b = S2Point.normalize(b); + c = S2Point.normalize(c); + d = S2Point.normalize(d); + + compareResult(S2EdgeUtil.robustCrossing(a, b, c, d), robust); + if (simple) { + assertEquals(robust > 0, S2EdgeUtil.simpleCrossing(a, b, c, d)); + } + S2EdgeUtil.EdgeCrosser crosser = new S2EdgeUtil.EdgeCrosser(a, b, c); + compareResult(crosser.robustCrossing(d), robust); + compareResult(crosser.robustCrossing(c), robust); + + assertEquals(S2EdgeUtil.edgeOrVertexCrossing(a, b, c, d), edgeOrVertex); + assertEquals(edgeOrVertex, crosser.edgeOrVertexCrossing(d)); + assertEquals(edgeOrVertex, crosser.edgeOrVertexCrossing(c)); + } + + private void assertCrossings(S2Point a, + S2Point b, + S2Point c, + S2Point d, + int robust, + boolean edgeOrVertex, + boolean simple) { + assertCrossing(a, b, c, d, robust, edgeOrVertex, simple); + assertCrossing(b, a, c, d, robust, edgeOrVertex, simple); + assertCrossing(a, b, d, c, robust, edgeOrVertex, simple); + assertCrossing(b, a, d, c, robust, edgeOrVertex, simple); + assertCrossing(a, a, c, d, DEGENERATE, false, false); + assertCrossing(a, b, c, c, DEGENERATE, false, false); + assertCrossing(a, b, a, b, 0, true, false); + assertCrossing(c, d, a, b, robust, (edgeOrVertex ^ (robust == 0)), simple); + } + + public void testCrossings() { + // The real tests of edge crossings are in s2{loop,polygon}_unittest, + // but we do a few simple tests here. 
+ + // Two regular edges that cross. + assertCrossings(new S2Point(1, 2, 1), + new S2Point(1, -3, 0.5), + new S2Point(1, -0.5, -3), + new S2Point(0.1, 0.5, 3), + 1, + true, + true); + + // Two regular edges that cross antipodal points. + assertCrossings(new S2Point(1, 2, 1), + new S2Point(1, -3, 0.5), + new S2Point(-1, 0.5, 3), + new S2Point(-0.1, -0.5, -3), + -1, + false, + true); + + // Two edges on the same great circle. + assertCrossings(new S2Point(0, 0, -1), + new S2Point(0, 1, 0), + new S2Point(0, 1, 1), + new S2Point(0, 0, 1), + -1, + false, + true); + + // Two edges that cross where one vertex is S2.Origin(). + assertCrossings(new S2Point(1, 0, 0), + new S2Point(0, 1, 0), + new S2Point(0, 0, 1), + new S2Point(1, 1, -1), + 1, + true, + true); + + // Two edges that cross antipodal points where one vertex is S2.Origin(). + assertCrossings(new S2Point(1, 0, 0), + new S2Point(0, 1, 0), + new S2Point(0, 0, -1), + new S2Point(-1, -1, 1), + -1, + false, + true); + + // Two edges that share an endpoint. The Ortho() direction is (-4,0,2), + // and edge CD is further CCW around (2,3,4) than AB. + assertCrossings(new S2Point(2, 3, 4), + new S2Point(-1, 2, 5), + new S2Point(7, -2, 3), + new S2Point(2, 3, 4), + 0, + false, + true); + + // Two edges that barely cross edge other. + assertCrossings(new S2Point(1, 1, 1), + new S2Point(1, 1 - 1e-15, -1), + new S2Point(-1, -1, 0), + new S2Point(1, 1, 0), + 1, + true, + false); + } + + private S2LatLngRect getEdgeBound(double x1, + double y1, + double z1, + double x2, + double y2, + double z2) { + S2EdgeUtil.RectBounder bounder = new S2EdgeUtil.RectBounder(); + S2Point p1 = S2Point.normalize(new S2Point(x1, y1, z1)); + S2Point p2 = S2Point.normalize(new S2Point(x2, y2, z2)); + bounder.addPoint(p1); + bounder.addPoint(p2); + return bounder.getBound(); + } + + public void testRectBounder() { + // Check cases where min/max latitude is not at a vertex. 
+ // Max, CW + assertDoubleNear(getEdgeBound(1, 1, 1, 1, -1, 1).lat().hi(), S2.M_PI_4); + // Max, CCW + assertDoubleNear(getEdgeBound(1, -1, 1, 1, 1, 1).lat().hi(), S2.M_PI_4); + // Min, CW + assertDoubleNear(getEdgeBound(1, -1, -1, -1, -1, -1).lat().lo(), -S2.M_PI_4); + // Min, CCW + assertDoubleNear(getEdgeBound(-1, 1, -1, -1, -1, -1).lat().lo(), -S2.M_PI_4); + + // Check cases where the edge passes through one of the poles. + assertDoubleNear(getEdgeBound(.3, .4, 1, -.3, -.4, 1).lat().hi(), S2.M_PI_2); + assertDoubleNear(getEdgeBound(.3, .4, -1, -.3, -.4, -1).lat().lo(), -S2.M_PI_2); + + // Check cases where the min/max latitude is attained at a vertex. + double kCubeLat = Math.asin(Math.sqrt(1. / 3)); // 35.26 degrees + assertTrue( + getEdgeBound(1, 1, 1, 1, -1, -1).lat().approxEquals(new R1Interval(-kCubeLat, kCubeLat))); + assertTrue( + getEdgeBound(1, -1, 1, 1, 1, -1).lat().approxEquals(new R1Interval(-kCubeLat, kCubeLat))); + } + + public void testLongitudePruner() { + S2EdgeUtil.LongitudePruner pruner1 = new S2EdgeUtil.LongitudePruner( + new S1Interval(0.75 * S2.M_PI, -0.75 * S2.M_PI), new S2Point(0, 1, 2)); + + assertEquals(pruner1.intersects(new S2Point(1, 1, 3)), false); + assertEquals(pruner1.intersects(new S2Point(-1 - 1e-15, -1, 0)), true); + assertEquals(pruner1.intersects(new S2Point(-1, 0, 0)), true); + assertEquals(pruner1.intersects(new S2Point(-1, 0, 0)), true); + assertEquals(pruner1.intersects(new S2Point(1, -1, 8)), true); + assertEquals(pruner1.intersects(new S2Point(1, 0, -2)), false); + assertEquals(pruner1.intersects(new S2Point(-1, -1e-15, 0)), true); + + S2EdgeUtil.LongitudePruner pruner2 = new S2EdgeUtil.LongitudePruner( + new S1Interval(0.25 * S2.M_PI, 0.25 * S2.M_PI), new S2Point(1, 0, 0)); + + assertEquals(pruner2.intersects(new S2Point(2, 1, 2)), false); + assertEquals(pruner2.intersects(new S2Point(1, 2, 3)), true); + assertEquals(pruner2.intersects(new S2Point(0, 1, 4)), false); + assertEquals(pruner2.intersects(new 
S2Point(-1e-15, -1, -1)), false); + } + + private void assertWedge(S2Point a0, + S2Point ab1, + S2Point a2, + S2Point b0, + S2Point b2, + boolean contains, + boolean intersects, + boolean crosses) { + a0 = S2Point.normalize(a0); + ab1 = S2Point.normalize(ab1); + a2 = S2Point.normalize(a2); + b0 = S2Point.normalize(b0); + b2 = S2Point.normalize(b2); + + assertEquals(new S2EdgeUtil.WedgeContains().test(a0, ab1, a2, b0, b2), contains ? 1 : 0); + assertEquals(new S2EdgeUtil.WedgeIntersects().test(a0, ab1, a2, b0, b2), intersects ? -1 : 0); + assertEquals(new S2EdgeUtil.WedgeContainsOrIntersects().test(a0, ab1, a2, b0, b2), + contains ? 1 : intersects ? -1 : 0); + assertEquals(new S2EdgeUtil.WedgeContainsOrCrosses().test(a0, ab1, a2, b0, b2), + contains ? 1 : crosses ? -1 : 0); + } + + public void testWedges() { + // For simplicity, all of these tests use an origin of (0, 0, 1). + // This shouldn't matter as long as the lower-level primitives are + // implemented correctly. + + // Intersection in one wedge. + assertWedge(new S2Point(-1, 0, 10), + new S2Point(0, 0, 1), + new S2Point(1, 2, 10), + new S2Point(0, 1, 10), + new S2Point(1, -2, 10), + false, + true, + true); + // Intersection in two wedges. + assertWedge(new S2Point(-1, -1, 10), + new S2Point(0, 0, 1), + new S2Point(1, -1, 10), + new S2Point(1, 0, 10), + new S2Point(-1, 1, 10), + false, + true, + true); + + // Normal containment. + assertWedge(new S2Point(-1, -1, 10), + new S2Point(0, 0, 1), + new S2Point(1, -1, 10), + new S2Point(-1, 0, 10), + new S2Point(1, 0, 10), + true, + true, + false); + // Containment with equality on one side. + assertWedge(new S2Point(2, 1, 10), + new S2Point(0, 0, 1), + new S2Point(-1, -1, 10), + new S2Point(2, 1, 10), + new S2Point(1, -5, 10), + true, + true, + false); + // Containment with equality on the other side. 
+ assertWedge(new S2Point(2, 1, 10), + new S2Point(0, 0, 1), + new S2Point(-1, -1, 10), + new S2Point(1, -2, 10), + new S2Point(-1, -1, 10), + true, + true, + false); + // Containment with equality on both sides. + assertWedge(new S2Point(-2, 3, 10), + new S2Point(0, 0, 1), + new S2Point(4, -5, 10), + new S2Point(-2, 3, 10), + new S2Point(4, -5, 10), + true, + true, + false); + + // Disjoint with equality on one side. + assertWedge(new S2Point(-2, 3, 10), + new S2Point(0, 0, 1), + new S2Point(4, -5, 10), + new S2Point(4, -5, 10), + new S2Point(-2, -3, 10), + false, + false, + false); + // Disjoint with equality on the other side. + assertWedge(new S2Point(-2, 3, 10), + new S2Point(0, 0, 1), + new S2Point(0, 5, 10), + new S2Point(4, -5, 10), + new S2Point(-2, 3, 10), + false, + false, + false); + // Disjoint with equality on both sides. + assertWedge(new S2Point(-2, 3, 10), + new S2Point(0, 0, 1), + new S2Point(4, -5, 10), + new S2Point(4, -5, 10), + new S2Point(-2, 3, 10), + false, + false, + false); + + // B contains A with equality on one side. + assertWedge(new S2Point(2, 1, 10), + new S2Point(0, 0, 1), + new S2Point(1, -5, 10), + new S2Point(2, 1, 10), + new S2Point(-1, -1, 10), + false, + true, + false); + // B contains A with equality on the other side. + assertWedge(new S2Point(2, 1, 10), + new S2Point(0, 0, 1), + new S2Point(1, -5, 10), + new S2Point(-2, 1, 10), + new S2Point(1, -5, 10), + false, + true, + false); + } + + public void testGetClosestPoint() { + final double kMargin = 1e-6; + + S2Point a = S2LatLng.fromDegrees(-0.5, 0).toPoint(); + S2Point b = S2LatLng.fromDegrees(+0.5, 0).toPoint(); + + // On edge at end points. + assertEquals(a, S2EdgeUtil.getClosestPoint(a, a, b)); + assertEquals(b, S2EdgeUtil.getClosestPoint(b, a, b)); + + // On edge in between. 
+ S2Point mid = S2LatLng.fromDegrees(0, 0).toPoint(); + assertEquals(mid, S2EdgeUtil.getClosestPoint(mid, a, b)); + + // End points are closest + assertEquals(a, S2EdgeUtil.getClosestPoint(S2LatLng.fromDegrees(-1, 0).toPoint(), a, b)); + assertEquals(b, S2EdgeUtil.getClosestPoint(S2LatLng.fromDegrees(+1, 0).toPoint(), a, b)); + + // Intermediate point is closest. + S2Point x = S2LatLng.fromDegrees(+0.1, 1).toPoint(); + S2Point expectedClosestPoint = S2LatLng.fromDegrees(+0.1, 0).toPoint(); + + assertTrue(expectedClosestPoint.aequal(S2EdgeUtil.getClosestPoint(x, a, b), kMargin)); + } + + // Given a point X and an edge AB, check that the distance from X to AB is + // "distanceRadians" and the closest point on AB is "expectedClosest". + private static void checkDistance( + S2Point x, S2Point a, S2Point b, double distanceRadians, S2Point expectedClosest) { + final double kEpsilon = 1e-10; + x = S2Point.normalize(x); + a = S2Point.normalize(a); + b = S2Point.normalize(b); + expectedClosest = S2Point.normalize(expectedClosest); + + assertEquals(distanceRadians, S2EdgeUtil.getDistance(x, a, b).radians(), kEpsilon); + + S2Point closest = S2EdgeUtil.getClosestPoint(x, a, b); + if (expectedClosest.equals(new S2Point(0, 0, 0))) { + // This special value says that the result should be A or B. 
+ assertTrue(closest == a || closest == b); + } else { + assertTrue(S2.approxEquals(closest, expectedClosest)); + } + } + + public void testGetDistance() { + checkDistance( + new S2Point(1, 0, 0), new S2Point(1, 0, 0), new S2Point(0, 1, 0), 0, new S2Point(1, 0, 0)); + checkDistance( + new S2Point(0, 1, 0), new S2Point(1, 0, 0), new S2Point(0, 1, 0), 0, new S2Point(0, 1, 0)); + checkDistance( + new S2Point(1, 3, 0), new S2Point(1, 0, 0), new S2Point(0, 1, 0), 0, new S2Point(1, 3, 0)); + checkDistance(new S2Point(0, 0, 1), new S2Point(1, 0, 0), new S2Point(0, 1, 0), Math.PI / 2, + new S2Point(1, 0, 0)); + checkDistance(new S2Point(0, 0, -1), new S2Point(1, 0, 0), new S2Point(0, 1, 0), Math.PI / 2, + new S2Point(1, 0, 0)); + checkDistance(new S2Point(-1, -1, 0), new S2Point(1, 0, 0), new S2Point(0, 1, 0), + 0.75 * Math.PI, new S2Point(0, 0, 0)); + checkDistance(new S2Point(0, 1, 0), new S2Point(1, 0, 0), new S2Point(1, 1, 0), Math.PI / 4, + new S2Point(1, 1, 0)); + checkDistance(new S2Point(0, -1, 0), new S2Point(1, 0, 0), new S2Point(1, 1, 0), Math.PI / 2, + new S2Point(1, 0, 0)); + checkDistance(new S2Point(0, -1, 0), new S2Point(1, 0, 0), new S2Point(-1, 1, 0), Math.PI / 2, + new S2Point(1, 0, 0)); + checkDistance(new S2Point(-1, -1, 0), new S2Point(1, 0, 0), new S2Point(-1, 1, 0), Math.PI / 2, + new S2Point(-1, 1, 0)); + checkDistance(new S2Point(1, 1, 1), new S2Point(1, 0, 0), new S2Point(0, 1, 0), + Math.asin(Math.sqrt(1. / 3)), new S2Point(1, 1, 0)); + checkDistance(new S2Point(1, 1, -1), new S2Point(1, 0, 0), new S2Point(0, 1, 0), + Math.asin(Math.sqrt(1. 
/ 3)), new S2Point(1, 1, 0)); + checkDistance(new S2Point(-1, 0, 0), new S2Point(1, 1, 0), new S2Point(1, 1, 0), 0.75 * Math.PI, + new S2Point(1, 1, 0)); + checkDistance(new S2Point(0, 0, -1), new S2Point(1, 1, 0), new S2Point(1, 1, 0), Math.PI / 2, + new S2Point(1, 1, 0)); + checkDistance(new S2Point(-1, 0, 0), new S2Point(1, 0, 0), new S2Point(1, 0, 0), Math.PI, + new S2Point(1, 0, 0)); + } + + public void testIntersectionTolerance() { + // We repeatedly construct two edges that cross near a random point "p", + // and measure the distance from the actual intersection point "x" to the + // expected intersection point "p" and also to the edges that cross + // near "p". + // + // Note that getIntersection() does not guarantee that "x" and "p" will be + // close together (since the intersection point is numerically unstable + // when the edges cross at a very small angle), but it does guarantee that + // "x" will be close to both of the edges that cross. + S1Angle maxPointDist = new S1Angle(); + S1Angle maxEdgeDist = new S1Angle(); + + for (int i = 0; i < 1000; ++i) { + // We construct two edges AB and CD that intersect near "p". The angle + // between AB and CD (expressed as a slope) is chosen randomly between + // 1e-15 and 1.0 such that its logarithm is uniformly distributed. This + // implies that small values are much more likely to be chosen. + // + // Once the slope is chosen, the four points ABCD must be offset from P + // by at least (1e-15 / slope) so that the points are guaranteed to have + // the correct circular ordering around P. This is the distance from P + // at which the two edges are separated by about 1e-15, which is + // approximately the minimum distance at which we can expect computed + // points on the two lines to be distinct and have the correct ordering. + // + // The actual offset distance from P is chosen randomly in the range + // [1e-15 / slope, 1.0], again uniformly distributing the logarithm. 
+ // This ensures that we test both long and very short segments that + // intersect at both large and very small angles. + + ImmutableList<S2Point> points = getRandomFrame(); + S2Point p = points.get(0); + S2Point d1 = points.get(1); + S2Point d2 = points.get(2); + double slope = Math.pow(1e-15, rand.nextDouble()); + d2 = S2Point.add(d1, S2Point.mul(d2, slope)); + S2Point a = S2Point.normalize( + S2Point.add(p, S2Point.mul(d1, Math.pow(1e-15 / slope, rand.nextDouble())))); + S2Point b = S2Point.normalize( + S2Point.sub(p, S2Point.mul(d1, Math.pow(1e-15 / slope, rand.nextDouble())))); + S2Point c = S2Point.normalize( + S2Point.add(p, S2Point.mul(d2, Math.pow(1e-15 / slope, rand.nextDouble())))); + S2Point d = S2Point.normalize( + S2Point.sub(p, S2Point.mul(d2, Math.pow(1e-15 / slope, rand.nextDouble())))); + S2Point x = S2EdgeUtil.getIntersection(a, b, c, d); + S1Angle distAb = S2EdgeUtil.getDistance(x, a, b); + S1Angle distCd = S2EdgeUtil.getDistance(x, c, d); + + assertTrue(distAb.lessThan(S2EdgeUtil.DEFAULT_INTERSECTION_TOLERANCE)); + assertTrue(distCd.lessThan(S2EdgeUtil.DEFAULT_INTERSECTION_TOLERANCE)); + + // test getIntersection() post conditions + assertTrue(S2.orderedCCW(a, x, b, S2Point.normalize(S2.robustCrossProd(a, b)))); + assertTrue(S2.orderedCCW(c, x, d, S2Point.normalize(S2.robustCrossProd(c, d)))); + + maxEdgeDist = S1Angle.max(maxEdgeDist, S1Angle.max(distAb, distCd)); + maxPointDist = S1Angle.max(maxPointDist, new S1Angle(p, x)); + } + } +} diff --git a/tests/com/google/common/geometry/S2LatLngRectTest.java b/tests/com/google/common/geometry/S2LatLngRectTest.java new file mode 100644 index 0000000..77038bf --- /dev/null +++ b/tests/com/google/common/geometry/S2LatLngRectTest.java @@ -0,0 +1,512 @@ +/* + * Copyright 2005 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.common.geometry; + +public strictfp class S2LatLngRectTest extends GeometryTestCase { + + public void testIntervalOps(S2LatLngRect x, S2LatLngRect y, String expectedRelation, + S2LatLngRect expectedUnion, S2LatLngRect expectedIntersection) { + // Test all of the interval operations on the given pair of intervals. + // "expected_relation" is a sequence of "T" and "F" characters corresponding + // to the expected results of Contains(), InteriorContains(), Intersects(), + // and InteriorIntersects() respectively. + + assertEquals(x.contains(y), expectedRelation.charAt(0) == 'T'); + assertEquals(x.interiorContains(y), expectedRelation.charAt(1) == 'T'); + assertEquals(x.intersects(y), expectedRelation.charAt(2) == 'T'); + assertEquals(x.interiorIntersects(y), expectedRelation.charAt(3) == 'T'); + + assertEquals(x.contains(y), x.union(y).equals(x)); + assertEquals(x.intersects(y), !x.intersection(y).isEmpty()); + + assertTrue(x.union(y).equals(expectedUnion)); + assertTrue(x.intersection(y).equals(expectedIntersection)); + + if (y.getSize() == S2LatLng.fromRadians(0, 0)) { + S2LatLngRect r = x.addPoint(y.lo()); + assertTrue(r == expectedUnion); + } + } + + public void testCellOps(S2LatLngRect r, S2Cell cell, int level) { + // Test the relationship between the given rectangle and cell: + // 0 == no intersection, 1 == MayIntersect, 2 == Intersects, + // 3 == Vertex Containment, 4 == Contains + + boolean vertexContained = false; + for (int i = 0; i < 4; ++i) { + if (r.contains(cell.getVertexRaw(i)) + || (!r.isEmpty() && 
cell.contains(r.getVertex(i).toPoint()))) { + vertexContained = true; + } + } + assertEquals(r.mayIntersect(cell), level >= 1); + assertEquals(r.intersects(cell), level >= 2); + assertEquals(vertexContained, level >= 3); + assertEquals(r.contains(cell), level >= 4); + } + + public void testBasic() { + // Most of the S2LatLngRect methods have trivial implementations that + // use the R1Interval and S1Interval classes, so most of the testing + // is done in those unit tests. + + // Test basic properties of empty and full caps. + S2LatLngRect empty = S2LatLngRect.empty(); + S2LatLngRect full = S2LatLngRect.full(); + assertTrue(empty.isValid()); + assertTrue(empty.isEmpty()); + assertTrue(full.isValid()); + assertTrue(full.isFull()); + + // assertTrue various constructors and accessor methods. + S2LatLngRect d1 = rectFromDegrees(-90, 0, -45, 180); + assertDoubleNear(d1.latLo().degrees(), -90); + assertDoubleNear(d1.latHi().degrees(), -45); + assertDoubleNear(d1.lngLo().degrees(), 0); + assertDoubleNear(d1.lngHi().degrees(), 180); + assertTrue(d1.lat().equals(new R1Interval(-S2.M_PI_2, -S2.M_PI_4))); + assertTrue(d1.lng().equals(new S1Interval(0, S2.M_PI))); + + // FromCenterSize() + assertTrue( + S2LatLngRect.fromCenterSize(S2LatLng.fromDegrees(80, 170), S2LatLng.fromDegrees(40, 60)) + .approxEquals(rectFromDegrees(60, 140, 90, -160))); + assertTrue(S2LatLngRect + .fromCenterSize(S2LatLng.fromDegrees(10, 40), S2LatLng.fromDegrees(210, 400)).isFull()); + assertTrue( + S2LatLngRect.fromCenterSize(S2LatLng.fromDegrees(-90, 180), S2LatLng.fromDegrees(20, 50)) + .approxEquals(rectFromDegrees(-90, 155, -80, -155))); + + // FromPoint(), FromPointPair() + assertEquals(S2LatLngRect.fromPoint(d1.lo()), new S2LatLngRect(d1.lo(), d1.lo())); + assertEquals( + S2LatLngRect.fromPointPair(S2LatLng.fromDegrees(-35, -140), S2LatLng.fromDegrees(15, 155)), + rectFromDegrees(-35, 155, 15, -140)); + assertEquals( + S2LatLngRect.fromPointPair(S2LatLng.fromDegrees(25, -70), 
S2LatLng.fromDegrees(-90, 80)), + rectFromDegrees(-90, -70, 25, 80)); + + // GetCenter(), GetVertex(), Contains(S2LatLng), InteriorContains(S2LatLng). + S2LatLng eqM180 = S2LatLng.fromRadians(0, -S2.M_PI); + S2LatLng northPole = S2LatLng.fromRadians(S2.M_PI_2, 0); + S2LatLngRect r1 = new S2LatLngRect(eqM180, northPole); + + assertEquals(r1.getCenter(), S2LatLng.fromRadians(S2.M_PI_4, -S2.M_PI_2)); + assertEquals(r1.getVertex(0), S2LatLng.fromRadians(0, S2.M_PI)); + assertEquals(r1.getVertex(1), S2LatLng.fromRadians(0, 0)); + assertEquals(r1.getVertex(2), S2LatLng.fromRadians(S2.M_PI_2, 0)); + assertEquals(r1.getVertex(3), S2LatLng.fromRadians(S2.M_PI_2, S2.M_PI)); + assertTrue(r1.contains(S2LatLng.fromDegrees(30, -45))); + assertTrue(!r1.contains(S2LatLng.fromDegrees(30, 45))); + assertTrue(!r1.interiorContains(eqM180) && !r1.interiorContains(northPole)); + assertTrue(r1.contains(new S2Point(0.5, -0.3, 0.1))); + assertTrue(!r1.contains(new S2Point(0.5, 0.2, 0.1))); + + // Make sure that GetVertex() returns vertices in CCW order. + for (int i = 0; i < 4; ++i) { + double lat = S2.M_PI_4 * (i - 2); + double lng = S2.M_PI_2 * (i - 2) + 0.2; + S2LatLngRect r = new S2LatLngRect(new R1Interval(lat, lat + S2.M_PI_4), new S1Interval( + Math.IEEEremainder(lng, 2 * S2.M_PI), Math.IEEEremainder(lng + S2.M_PI_2, 2 * S2.M_PI))); + for (int k = 0; k < 4; ++k) { + assertTrue( + S2.simpleCCW(r.getVertex((k - 1) & 3).toPoint(), r.getVertex(k).toPoint(), + r.getVertex((k + 1) & 3).toPoint())); + } + } + + // Contains(S2LatLngRect), InteriorContains(S2LatLngRect), + // Intersects(), InteriorIntersects(), Union(), Intersection(). + // + // Much more testing of these methods is done in s1interval_unittest + // and r1interval_unittest. 
+ + S2LatLngRect r1Mid = rectFromDegrees(45, -90, 45, -90); + S2LatLngRect reqM180 = new S2LatLngRect(eqM180, eqM180); + S2LatLngRect rNorthPole = new S2LatLngRect(northPole, northPole); + + testIntervalOps(r1, r1Mid, "TTTT", r1, r1Mid); + testIntervalOps(r1, reqM180, "TFTF", r1, reqM180); + testIntervalOps(r1, rNorthPole, "TFTF", r1, rNorthPole); + + assertTrue(r1.equals(rectFromDegrees(0, -180, 90, 0))); + testIntervalOps(r1, rectFromDegrees(-10, -1, 1, 20), "FFTT", rectFromDegrees(-10, -180, 90, 20), + rectFromDegrees(0, -1, 1, 0)); + testIntervalOps(r1, rectFromDegrees(-10, -1, 0, 20), "FFTF", rectFromDegrees(-10, -180, 90, 20), + rectFromDegrees(0, -1, 0, 0)); + testIntervalOps(r1, rectFromDegrees(-10, 0, 1, 20), "FFTF", rectFromDegrees(-10, -180, 90, 20), + rectFromDegrees(0, 0, 1, 0)); + + testIntervalOps(rectFromDegrees(-15, -160, -15, -150), rectFromDegrees(20, 145, 25, 155), + "FFFF", rectFromDegrees(-15, 145, 25, -150), empty); + testIntervalOps(rectFromDegrees(70, -10, 90, -140), rectFromDegrees(60, 175, 80, 5), "FFTT", + rectFromDegrees(60, -180, 90, 180), rectFromDegrees(70, 175, 80, 5)); + + // assertTrue that the intersection of two rectangles that overlap in + // latitude + // but not longitude is valid, and vice versa. 
+ testIntervalOps(rectFromDegrees(12, 30, 60, 60), rectFromDegrees(0, 0, 30, 18), "FFFF", + rectFromDegrees(0, 0, 60, 60), empty); + testIntervalOps(rectFromDegrees(0, 0, 18, 42), rectFromDegrees(30, 12, 42, 60), "FFFF", + rectFromDegrees(0, 0, 42, 60), empty); + + // AddPoint() + S2LatLngRect p = S2LatLngRect.empty(); + p = p.addPoint(S2LatLng.fromDegrees(0, 0)); + p = p.addPoint(S2LatLng.fromRadians(0, -S2.M_PI_2)); + p = p.addPoint(S2LatLng.fromRadians(S2.M_PI_4, -S2.M_PI)); + p = p.addPoint(new S2Point(0, 0, 1)); + assertTrue(p.equals(r1)); + + // Expanded() + assertTrue( + rectFromDegrees(70, 150, 80, 170).expanded(S2LatLng.fromDegrees(20, 30)).approxEquals( + rectFromDegrees(50, 120, 90, -160))); + assertTrue(S2LatLngRect.empty().expanded(S2LatLng.fromDegrees(20, 30)).isEmpty()); + assertTrue(S2LatLngRect.full().expanded(S2LatLng.fromDegrees(20, 30)).isFull()); + assertTrue( + rectFromDegrees(-90, 170, 10, 20).expanded(S2LatLng.fromDegrees(30, 80)).approxEquals( + rectFromDegrees(-90, -180, 40, 180))); + + // ConvolveWithCap() + S2LatLngRect llr1 = + new S2LatLngRect(S2LatLng.fromDegrees(0, 170), S2LatLng.fromDegrees(0, -170)) + .convolveWithCap(S1Angle.degrees(15)); + S2LatLngRect llr2 = + new S2LatLngRect(S2LatLng.fromDegrees(-15, 155), S2LatLng.fromDegrees(15, -155)); + assertTrue(llr1.approxEquals(llr2)); + + llr1 = new S2LatLngRect(S2LatLng.fromDegrees(60, 150), S2LatLng.fromDegrees(80, 10)) + .convolveWithCap(S1Angle.degrees(15)); + llr2 = new S2LatLngRect(S2LatLng.fromDegrees(45, -180), S2LatLng.fromDegrees(90, 180)); + assertTrue(llr1.approxEquals(llr2)); + + // GetCapBound(), bounding cap at center is smaller: + assertTrue(new S2LatLngRect(S2LatLng.fromDegrees(-45, -45), S2LatLng.fromDegrees(45, 45)) + .getCapBound().approxEquals(S2Cap.fromAxisHeight(new S2Point(1, 0, 0), 0.5))); + // GetCapBound(), bounding cap at north pole is smaller: + assertTrue(new S2LatLngRect(S2LatLng.fromDegrees(88, -80), S2LatLng.fromDegrees(89, 80)) + 
.getCapBound().approxEquals(S2Cap.fromAxisAngle(new S2Point(0, 0, 1), S1Angle.degrees(2)))); + // GetCapBound(), longitude span > 180 degrees: + assertTrue( + new S2LatLngRect(S2LatLng.fromDegrees(-30, -150), S2LatLng.fromDegrees(-10, 50)) + .getCapBound() + .approxEquals(S2Cap.fromAxisAngle(new S2Point(0, 0, -1), S1Angle.degrees(80)))); + + // Contains(S2Cell), MayIntersect(S2Cell), Intersects(S2Cell) + + // Special cases. + testCellOps(empty, S2Cell.fromFacePosLevel(3, (byte) 0, 0), 0); + testCellOps(full, S2Cell.fromFacePosLevel(2, (byte) 0, 0), 4); + testCellOps(full, S2Cell.fromFacePosLevel(5, (byte) 0, 25), 4); + + // This rectangle includes the first quadrant of face 0. It's expanded + // slightly because cell bounding rectangles are slightly conservative. + S2LatLngRect r4 = rectFromDegrees(-45.1, -45.1, 0.1, 0.1); + testCellOps(r4, S2Cell.fromFacePosLevel(0, (byte) 0, 0), 3); + testCellOps(r4, S2Cell.fromFacePosLevel(0, (byte) 0, 1), 4); + testCellOps(r4, S2Cell.fromFacePosLevel(1, (byte) 0, 1), 0); + + // This rectangle intersects the first quadrant of face 0. + S2LatLngRect r5 = rectFromDegrees(-10, -45, 10, 0); + testCellOps(r5, S2Cell.fromFacePosLevel(0, (byte) 0, 0), 3); + testCellOps(r5, S2Cell.fromFacePosLevel(0, (byte) 0, 1), 3); + testCellOps(r5, S2Cell.fromFacePosLevel(1, (byte) 0, 1), 0); + + // Rectangle consisting of a single point. + testCellOps(rectFromDegrees(4, 4, 4, 4), S2Cell.fromFacePosLevel(0, (byte) 0, 0), 3); + + // Rectangles that intersect the bounding rectangle of a face + // but not the face itself. + testCellOps(rectFromDegrees(41, -87, 42, -79), S2Cell.fromFacePosLevel(2, (byte) 0, 0), 1); + testCellOps(rectFromDegrees(-41, 160, -40, -160), S2Cell.fromFacePosLevel(5, (byte) 0, 0), 1); + { + // This is the leaf cell at the top right hand corner of face 0. + // It has two angles of 60 degrees and two of 120 degrees. 
+ S2Cell cell0tr = new S2Cell(new S2Point(1 + 1e-12, 1, 1)); + S2LatLngRect bound0tr = cell0tr.getRectBound(); + S2LatLng v0 = new S2LatLng(cell0tr.getVertexRaw(0)); + testCellOps( + rectFromDegrees(v0.lat().degrees() - 1e-8, v0.lng().degrees() - 1e-8, + v0.lat().degrees() - 2e-10, v0.lng().degrees() + 1e-10), cell0tr, 1); + } + + // Rectangles that intersect a face but where no vertex of one region + // is contained by the other region. The first one passes through + // a corner of one of the face cells. + testCellOps(rectFromDegrees(-37, -70, -36, -20), S2Cell.fromFacePosLevel(5, (byte) 0, 0), 2); + { + // These two intersect like a diamond and a square. + S2Cell cell202 = S2Cell.fromFacePosLevel(2, (byte) 0, 2); + S2LatLngRect bound202 = cell202.getRectBound(); + testCellOps( + rectFromDegrees(bound202.lo().lat().degrees() + 3, bound202.lo().lng().degrees() + 3, + bound202.hi().lat().degrees() - 3, bound202.hi().lng().degrees() - 3), cell202, 2); + } + } + + public void testArea() { + assertEquals(0.0, S2LatLngRect.empty().area()); + assertDoubleNear(4 * Math.PI, S2LatLngRect.full().area()); + assertDoubleNear(Math.PI / 2, rectFromDegrees(0, 0, 90, 90).area()); + } + + public void testEdgeBound() { + // assertTrue cases where min/max latitude is not at a vertex. + assertDoubleNear(getEdgeBound(1, 1, 1, 1, -1, 1).lat().hi(), S2.M_PI_4); // Max, + // CW + assertDoubleNear(getEdgeBound(1, -1, 1, 1, 1, 1).lat().hi(), S2.M_PI_4); // Max, + // CCW + assertDoubleNear(getEdgeBound(1, -1, -1, -1, -1, -1).lat().lo(), -S2.M_PI_4); // Min, + // CW + assertDoubleNear(getEdgeBound(-1, 1, -1, -1, -1, -1).lat().lo(), -S2.M_PI_4); // Min, + // CCW + + // assertTrue cases where the edge passes through one of the poles. + assertDoubleNear(getEdgeBound(.3, .4, 1, -.3, -.4, 1).lat().hi(), S2.M_PI_2); + assertDoubleNear(getEdgeBound(.3, .4, -1, -.3, -.4, -1).lat().lo(), -S2.M_PI_2); + + // assertTrue cases where the min/max latitude is attained at a vertex. 
+ final double kCubeLat = Math.asin(Math.sqrt(1. / 3)); // 35.26 degrees + assertTrue( + getEdgeBound(1, 1, 1, 1, -1, -1).lat().approxEquals(new R1Interval(-kCubeLat, kCubeLat))); + assertTrue( + getEdgeBound(1, -1, 1, 1, 1, -1).lat().approxEquals(new R1Interval(-kCubeLat, kCubeLat))); + } + + public void testGetDistanceOverlapping() { + // Check pairs of rectangles that overlap: (should all return 0): + S2LatLngRect a = rectFromDegrees(0, 0, 2, 2); + S2LatLngRect b = pointRectFromDegrees(0, 0); + S1Angle zero = S1Angle.radians(0); + assertEquals(zero, a.getDistance(a)); + assertEquals(zero, a.getDistance(b)); + assertEquals(zero, b.getDistance(b)); + assertEquals(zero, a.getDistance(S2LatLng.fromDegrees(0, 0))); + assertEquals(zero, a.getDistance(rectFromDegrees(0, 1, 2, 3))); + assertEquals(zero, a.getDistance(rectFromDegrees(0, 2, 2, 4))); + assertEquals(zero, a.getDistance(rectFromDegrees(1, 0, 3, 2))); + assertEquals(zero, a.getDistance(rectFromDegrees(2, 0, 4, 2))); + assertEquals(zero, a.getDistance(rectFromDegrees(1, 1, 3, 3))); + assertEquals(zero, a.getDistance(rectFromDegrees(2, 2, 4, 4))); + } + + public void testGetDistanceRectVsPoint() { + // Rect that spans 180. + S2LatLngRect a = rectFromDegrees(-1, -1, 2, 1); + verifyGetDistance(a, pointRectFromDegrees(-2, -1)); + verifyGetDistance(a, pointRectFromDegrees(1, 2)); + + verifyGetDistance(pointRectFromDegrees(-2, -1), a); + verifyGetDistance(pointRectFromDegrees(1, 2), a); + + verifyGetRectPointDistance(a, S2LatLng.fromDegrees(-2, -1)); + verifyGetRectPointDistance(a, S2LatLng.fromDegrees(1, 2)); + + // Tests near the north pole. 
+ S2LatLngRect b = rectFromDegrees(86, 0, 88, 2); + verifyGetDistance(b, pointRectFromDegrees(87, 3)); + verifyGetDistance(b, pointRectFromDegrees(87, -1)); + verifyGetDistance(b, pointRectFromDegrees(89, 1)); + verifyGetDistance(b, pointRectFromDegrees(89, 181)); + verifyGetDistance(b, pointRectFromDegrees(85, 1)); + verifyGetDistance(b, pointRectFromDegrees(85, 181)); + verifyGetDistance(b, pointRectFromDegrees(90, 0)); + + verifyGetDistance(pointRectFromDegrees(87, 3), b); + verifyGetDistance(pointRectFromDegrees(87, -1), b); + verifyGetDistance(pointRectFromDegrees(89, 1), b); + verifyGetDistance(pointRectFromDegrees(89, 181), b); + verifyGetDistance(pointRectFromDegrees(85, 1), b); + verifyGetDistance(pointRectFromDegrees(85, 181), b); + verifyGetDistance(pointRectFromDegrees(90, 0), b); + + verifyGetRectPointDistance(b, S2LatLng.fromDegrees(87, 3)); + verifyGetRectPointDistance(b, S2LatLng.fromDegrees(87, -1)); + verifyGetRectPointDistance(b, S2LatLng.fromDegrees(89, 1)); + verifyGetRectPointDistance(b, S2LatLng.fromDegrees(89, 181)); + verifyGetRectPointDistance(b, S2LatLng.fromDegrees(85, 1)); + verifyGetRectPointDistance(b, S2LatLng.fromDegrees(85, 181)); + verifyGetRectPointDistance(b, S2LatLng.fromDegrees(90, 0)); + + // Rect that touches the north pole. + S2LatLngRect c = rectFromDegrees(88, 0, 90, 2); + verifyGetDistance(c, pointRectFromDegrees(89, 3)); + verifyGetDistance(c, pointRectFromDegrees(89, 90)); + verifyGetDistance(c, pointRectFromDegrees(89, 181)); + verifyGetDistance(pointRectFromDegrees(89, 3), c); + verifyGetDistance(pointRectFromDegrees(89, 90), c); + verifyGetDistance(pointRectFromDegrees(89, 181), c); + } + + public void testGetDistanceRectVsRect() { + // Rect that spans 180. + S2LatLngRect a = rectFromDegrees(-1, -1, 2, 1); + verifyGetDistance(a, rectFromDegrees(0, 2, 1, 3)); + verifyGetDistance(a, rectFromDegrees(-2, -3, -1, -2)); + + // Tests near the south pole. 
+ S2LatLngRect b = rectFromDegrees(-87, 0, -85, 3); + verifyGetDistance(b, rectFromDegrees(-89, 1, -88, 2)); + verifyGetDistance(b, rectFromDegrees(-84, 1, -83, 2)); + verifyGetDistance(b, rectFromDegrees(-88, 90, -86, 91)); + verifyGetDistance(b, rectFromDegrees(-84, -91, -83, -90)); + verifyGetDistance(b, rectFromDegrees(-90, 181, -89, 182)); + verifyGetDistance(b, rectFromDegrees(-84, 181, -83, 182)); + } + + public void testGetDistanceRandomPairs() { + // Test random pairs. + for (int i = 0; i < 10000; ++i) { + S2LatLngRect a = + S2LatLngRect.fromPointPair(new S2LatLng(randomPoint()), new S2LatLng(randomPoint())); + S2LatLngRect b = + S2LatLngRect.fromPointPair(new S2LatLng(randomPoint()), new S2LatLng(randomPoint())); + verifyGetDistance(a, b); + + + S2LatLng c = new S2LatLng(randomPoint()); + verifyGetRectPointDistance(a, c); + verifyGetRectPointDistance(b, c); + } + } + + private static S1Angle bruteForceDistance(S2LatLngRect a, S2LatLngRect b) { + if (a.intersects(b)) { + return S1Angle.radians(0); + } + + // Compare every point in 'a' against every latitude edge and longitude edge + // in 'b', and vice-versa, for a total of 16 point-vs-latitude-edge tests + // and 16 point-vs-longitude-edge tests. + S2LatLng pntA[] = + {new S2LatLng(a.latLo(), a.lngLo()), new S2LatLng(a.latLo(), a.lngHi()), + new S2LatLng(a.latHi(), a.lngHi()), new S2LatLng(a.latHi(), a.lngLo())}; + S2LatLng pntB[] = + {new S2LatLng(b.latLo(), b.lngLo()), new S2LatLng(b.latLo(), b.lngHi()), + new S2LatLng(b.latHi(), b.lngHi()), new S2LatLng(b.latHi(), b.lngLo())}; + + // Make arrays containing the lo/hi latitudes and the lo/hi longitude edges. 
+ S1Angle latA[] = {a.latLo(), a.latHi()}; + S1Angle latB[] = {b.latLo(), b.latHi()}; + S2Point lng_edge_a[][] = + { {pntA[0].toPoint(), pntA[3].toPoint()}, {pntA[1].toPoint(), pntA[2].toPoint()}}; + S2Point lng_edge_b[][] = + { {pntB[0].toPoint(), pntB[3].toPoint()}, {pntB[1].toPoint(), pntB[2].toPoint()}}; + + S1Angle minDistance = S1Angle.degrees(180.0); + for (int i = 0; i < 4; ++i) { + // For each point in a and b. + S2LatLng currentA = pntA[i]; + S2LatLng currentB = pntB[i]; + + for (int j = 0; j < 2; ++j) { + // Get distances to latitude and longitude edges. + S1Angle aToLat = getDistance(currentA, latB[j], b.lng()); + S1Angle bToLat = getDistance(currentB, latA[j], a.lng()); + S1Angle aToLng = + S2EdgeUtil.getDistance(currentA.toPoint(), lng_edge_b[j][0], lng_edge_b[j][1]); + S1Angle bToLng = + S2EdgeUtil.getDistance(currentB.toPoint(), lng_edge_a[j][0], lng_edge_a[j][1]); + + minDistance = S1Angle.min( + minDistance, S1Angle.min(aToLat, S1Angle.min(bToLat, S1Angle.min(aToLng, bToLng)))); + } + } + return minDistance; + } + + private static S1Angle bruteForceRectPointDistance(S2LatLngRect a, S2LatLng b) { + if (a.contains(b)) { + return S1Angle.radians(0); + } + + S1Angle bToLoLat = getDistance(b, a.latLo(), a.lng()); + S1Angle bToHiLat = getDistance(b, a.latHi(), a.lng()); + S1Angle bToLoLng = + S2EdgeUtil.getDistance(b.toPoint(), new S2LatLng(a.latLo(), a.lngLo()).toPoint(), + new S2LatLng(a.latHi(), a.lngLo()).toPoint()); + S1Angle bToHiLng = + S2EdgeUtil.getDistance(b.toPoint(), new S2LatLng(a.latLo(), a.lngHi()).toPoint(), + new S2LatLng(a.latHi(), a.lngHi()).toPoint()); + return S1Angle.min(bToLoLat, S1Angle.min(bToHiLat, S1Angle.min(bToLoLng, bToHiLng))); + } + + /** + * Returns the minimum distance from X to the latitude line segment defined by + * the given latitude and longitude interval. 
+ */ + private static S1Angle getDistance(S2LatLng x, S1Angle lat, S1Interval interval) { + assertTrue(x.isValid()); + assertTrue(interval.isValid()); + + // Is X inside the longitude interval? + if (interval.contains(x.lng().radians())) + return S1Angle.radians(Math.abs(x.lat().radians() - lat.radians())); + + // Return the distance to the closer endpoint. + return S1Angle.min(x.getDistance(new S2LatLng(lat, S1Angle.radians(interval.lo()))), + x.getDistance(new S2LatLng(lat, S1Angle.radians(interval.hi())))); + } + + private static S2LatLngRect getEdgeBound(double x1, + double y1, + double z1, + double x2, + double y2, + double z2) { + return S2LatLngRect.fromEdge( + S2Point.normalize(new S2Point(x1, y1, z1)), S2Point.normalize(new S2Point(x2, y2, z2))); + } + + private static S2LatLngRect pointRectFromDegrees(double lat, double lng) { + return S2LatLngRect.fromPoint(S2LatLng.fromDegrees(lat, lng).normalized()); + } + + private static S2LatLngRect rectFromDegrees( + double latLo, double lngLo, double latHi, double lngHi) { + // Convenience method to construct a rectangle. This method is + // intentionally *not* in the S2LatLngRect interface because the + // argument order is ambiguous, but hopefully it's not too confusing + // within the context of this unit test. + + return new S2LatLngRect(S2LatLng.fromDegrees(latLo, lngLo).normalized(), + S2LatLng.fromDegrees(latHi, lngHi).normalized()); + } + + /** + * This method verifies a.getDistance(b), where b is a S2LatLng, by comparing + * its result against a.getDistance(c), c being the point rectangle created + * from b. + */ + private static void verifyGetRectPointDistance(S2LatLngRect a, S2LatLng p) { + S1Angle distance1 = bruteForceRectPointDistance(a, p.normalized()); + S1Angle distance2 = a.getDistance(p.normalized()); + assertEquals(distance1.radians(), distance2.radians(), 1e-10); + } + + /** + * This method verifies a.getDistance(b) by comparing its result against a + * brute-force implementation. 
The correctness of the brute-force version is + * much easier to verify by inspection. + */ + private static void verifyGetDistance(S2LatLngRect a, S2LatLngRect b) { + S1Angle distance1 = bruteForceDistance(a, b); + S1Angle distance2 = a.getDistance(b); + assertEquals(distance1.radians(), distance2.radians(), 1e-10); + } +} diff --git a/tests/com/google/common/geometry/S2LatLngTest.java b/tests/com/google/common/geometry/S2LatLngTest.java new file mode 100644 index 0000000..6a2ee24 --- /dev/null +++ b/tests/com/google/common/geometry/S2LatLngTest.java @@ -0,0 +1,90 @@ +/* + * Copyright 2005 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.common.geometry; + +public strictfp class S2LatLngTest extends GeometryTestCase { + + public void testBasic() { + S2LatLng llRad = S2LatLng.fromRadians(S2.M_PI_4, S2.M_PI_2); + assertTrue(llRad.lat().radians() == S2.M_PI_4); + assertTrue(llRad.lng().radians() == S2.M_PI_2); + assertTrue(llRad.isValid()); + S2LatLng llDeg = S2LatLng.fromDegrees(45, 90); + assertEquals(llDeg, llRad); + assertTrue(llDeg.isValid()); + assertTrue(!S2LatLng.fromDegrees(-91, 0).isValid()); + assertTrue(!S2LatLng.fromDegrees(0, 181).isValid()); + + S2LatLng bad = S2LatLng.fromDegrees(120, 200); + assertTrue(!bad.isValid()); + S2LatLng better = bad.normalized(); + assertTrue(better.isValid()); + assertEquals(better.lat(), S1Angle.degrees(90)); + assertDoubleNear(better.lng().radians(), S1Angle.degrees(-160).radians()); + + bad = S2LatLng.fromDegrees(-100, -360); + assertTrue(!bad.isValid()); + better = bad.normalized(); + assertTrue(better.isValid()); + assertEquals(better.lat(), S1Angle.degrees(-90)); + assertDoubleNear(better.lng().radians(), 0); + + assertTrue((S2LatLng.fromDegrees(10, 20).add(S2LatLng.fromDegrees(20, 30))).approxEquals( + S2LatLng.fromDegrees(30, 50))); + assertTrue((S2LatLng.fromDegrees(10, 20).sub(S2LatLng.fromDegrees(20, 30))).approxEquals( + S2LatLng.fromDegrees(-10, -10))); + assertTrue((S2LatLng.fromDegrees(10, 20).mul(0.5)).approxEquals(S2LatLng.fromDegrees(5, 10))); + } + + public void testConversion() { + // Test special cases: poles, "date line" + assertDoubleNear( + new S2LatLng(S2LatLng.fromDegrees(90.0, 65.0).toPoint()).lat().degrees(), 90.0); + assertEquals( + new S2LatLng(S2LatLng.fromRadians(-S2.M_PI_2, 1).toPoint()).lat().radians(), -S2.M_PI_2); + assertDoubleNear( + Math.abs(new S2LatLng(S2LatLng.fromDegrees(12.2, 180.0).toPoint()).lng().degrees()), 180.0); + assertEquals( + Math.abs(new S2LatLng(S2LatLng.fromRadians(0.1, -S2.M_PI).toPoint()).lng().radians()), + S2.M_PI); + + // Test a bunch of random points. 
+ for (int i = 0; i < 100000; ++i) { + S2Point p = randomPoint(); + assertTrue(S2.approxEquals(p, new S2LatLng(p).toPoint())); + } + + // Test generation from E5 + S2LatLng test = S2LatLng.fromE5(123456, 98765); + assertDoubleNear(test.lat().degrees(), 1.23456); + assertDoubleNear(test.lng().degrees(), 0.98765); + } + + public void testDistance() { + assertEquals( + S2LatLng.fromDegrees(90, 0).getDistance(S2LatLng.fromDegrees(90, 0)).radians(), 0.0); + assertDoubleNear( + S2LatLng.fromDegrees(-37, 25).getDistance(S2LatLng.fromDegrees(-66, -155)).degrees(), 77, + 1e-13); + assertDoubleNear( + S2LatLng.fromDegrees(0, 165).getDistance(S2LatLng.fromDegrees(0, -80)).degrees(), 115, + 1e-13); + assertDoubleNear( + S2LatLng.fromDegrees(47, -127).getDistance(S2LatLng.fromDegrees(-47, 53)).degrees(), 180, + 2e-6); + } + +} diff --git a/tests/com/google/common/geometry/S2LoopTest.java b/tests/com/google/common/geometry/S2LoopTest.java new file mode 100644 index 0000000..f6ca5f8 --- /dev/null +++ b/tests/com/google/common/geometry/S2LoopTest.java @@ -0,0 +1,583 @@ +/* + * Copyright 2006 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.common.geometry; + +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.common.collect.Sets; + +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.logging.Logger; + +/** + * Tests for {@link S2Loop}. 
+ * + * Note that testLoopRelations2() is suppressed because it fails in corner + * cases due to a problem with S2.robustCCW(). + * + */ +public strictfp class S2LoopTest extends GeometryTestCase { + private static final Logger log = Logger.getLogger(S2LoopTest.class.getCanonicalName()); + + // A stripe that slightly over-wraps the equator. + private S2Loop candyCane = makeLoop("-20:150, -20:-70, 0:70, 10:-150, 10:70, -10:-70"); + + // A small clockwise loop in the northern & eastern hemisperes. + private S2Loop smallNeCw = makeLoop("35:20, 45:20, 40:25"); + + // Loop around the north pole at 80 degrees. + private S2Loop arctic80 = makeLoop("80:-150, 80:-30, 80:90"); + + // Loop around the south pole at 80 degrees. + private S2Loop antarctic80 = makeLoop("-80:120, -80:0, -80:-120"); + + // The northern hemisphere, defined using two pairs of antipodal points. + private S2Loop northHemi = makeLoop("0:-180, 0:-90, 0:0, 0:90"); + + // The northern hemisphere, defined using three points 120 degrees apart. + private S2Loop northHemi3 = makeLoop("0:-180, 0:-60, 0:60"); + + // The western hemisphere, defined using two pairs of antipodal points. + private S2Loop westHemi = makeLoop("0:-180, -90:0, 0:0, 90:0"); + + // The "near" hemisphere, defined using two pairs of antipodal points. + private S2Loop nearHemi = makeLoop("0:-90, -90:0, 0:90, 90:0"); + + // A diamond-shaped loop around the point 0:180. + private S2Loop loopA = makeLoop("0:178, -1:180, 0:-179, 1:-180"); + + // Another diamond-shaped loop around the point 0:180. + private S2Loop loopB = makeLoop("0:179, -1:180, 0:-178, 1:-180"); + + // The intersection of A and B. + private S2Loop aIntersectB = makeLoop("0:179, -1:180, 0:-179, 1:-180"); + + // The union of A and B. 
+ private S2Loop aUnionB = makeLoop("0:178, -1:180, 0:-178, 1:-180"); + + // A minus B (concave) + private S2Loop aMinusB = makeLoop("0:178, -1:180, 0:179, 1:-180"); + + // B minus A (concave) + private S2Loop bMinusA = makeLoop("0:-179, -1:180, 0:-178, 1:-180"); + + // A self-crossing loop with a duplicated vertex + private S2Loop bowtie = makeLoop("0:0, 2:0, 1:1, 0:2, 2:2, 1:1"); + + // Initialized below. + private S2Loop southHemi; + private S2Loop eastHemi; + private S2Loop farHemi; + + @Override + public void setUp() { + super.setUp(); + S2Loop.debugMode = true; + + southHemi = new S2Loop(northHemi); + southHemi.invert(); + + eastHemi = new S2Loop(westHemi); + eastHemi.invert(); + + farHemi = new S2Loop(nearHemi); + farHemi.invert(); + } + + public void testBounds() { + assertTrue(candyCane.getRectBound().lng().isFull()); + assertTrue(candyCane.getRectBound().latLo().degrees() < -20); + assertTrue(candyCane.getRectBound().latHi().degrees() > 10); + assertTrue(smallNeCw.getRectBound().isFull()); + assertEquals(arctic80.getRectBound(), + new S2LatLngRect(S2LatLng.fromDegrees(80, -180), S2LatLng.fromDegrees(90, 180))); + assertEquals(antarctic80.getRectBound(), + new S2LatLngRect(S2LatLng.fromDegrees(-90, -180), S2LatLng.fromDegrees(-80, 180))); + + arctic80.invert(); + // The highest latitude of each edge is attained at its midpoint. + S2Point mid = S2Point.mul(S2Point.add(arctic80.vertex(0), arctic80.vertex(1)), 0.5); + assertDoubleNear(arctic80.getRectBound().latHi().radians(), new S2LatLng(mid).lat().radians()); + arctic80.invert(); + + assertTrue(southHemi.getRectBound().lng().isFull()); + assertEquals(southHemi.getRectBound().lat(), new R1Interval(-S2.M_PI_2, 0)); + } + + public void testAreaCentroid() { + assertDoubleNear(northHemi.getArea(), 2 * S2.M_PI); + assertDoubleNear(eastHemi.getArea(), 2 * S2.M_PI); + + // Construct spherical caps of random height, and approximate their boundary + // with closely spaces vertices. 
Then check that the area and centroid are + // correct. + + for (int i = 0; i < 100; ++i) { + // Choose a coordinate frame for the spherical cap. + S2Point x = randomPoint(); + S2Point y = S2Point.normalize(S2Point.crossProd(x, randomPoint())); + S2Point z = S2Point.normalize(S2Point.crossProd(x, y)); + + // Given two points at latitude phi and whose longitudes differ by dtheta, + // the geodesic between the two points has a maximum latitude of + // atan(tan(phi) / cos(dtheta/2)). This can be derived by positioning + // the two points at (-dtheta/2, phi) and (dtheta/2, phi). + // + // We want to position the vertices close enough together so that their + // maximum distance from the boundary of the spherical cap is kMaxDist. + // Thus we want fabs(atan(tan(phi) / cos(dtheta/2)) - phi) <= kMaxDist. + double kMaxDist = 1e-6; + double height = 2 * rand.nextDouble(); + double phi = Math.asin(1 - height); + double maxDtheta = + 2 * Math.acos(Math.tan(Math.abs(phi)) / Math.tan(Math.abs(phi) + kMaxDist)); + maxDtheta = Math.min(S2.M_PI, maxDtheta); // At least 3 vertices. 
+ + List<S2Point> vertices = Lists.newArrayList(); + for (double theta = 0; theta < 2 * S2.M_PI; theta += rand.nextDouble() * maxDtheta) { + + S2Point xCosThetaCosPhi = S2Point.mul(x, (Math.cos(theta) * Math.cos(phi))); + S2Point ySinThetaCosPhi = S2Point.mul(y, (Math.sin(theta) * Math.cos(phi))); + S2Point zSinPhi = S2Point.mul(z, Math.sin(phi)); + + S2Point sum = S2Point.add(S2Point.add(xCosThetaCosPhi, ySinThetaCosPhi), zSinPhi); + + vertices.add(sum); + } + + S2Loop loop = new S2Loop(vertices); + S2AreaCentroid areaCentroid = loop.getAreaAndCentroid(); + + double area = loop.getArea(); + S2Point centroid = loop.getCentroid(); + double expectedArea = 2 * S2.M_PI * height; + assertTrue(areaCentroid.getArea() == area); + assertTrue(centroid.equals(areaCentroid.getCentroid())); + assertTrue(Math.abs(area - expectedArea) <= 2 * S2.M_PI * kMaxDist); + + // high probability + assertTrue(Math.abs(area - expectedArea) >= 0.01 * kMaxDist); + + S2Point expectedCentroid = S2Point.mul(z, expectedArea * (1 - 0.5 * height)); + + assertTrue(S2Point.sub(centroid, expectedCentroid).norm() <= 2 * kMaxDist); + } + } + + private S2Loop rotate(S2Loop loop) { + List<S2Point> vertices = Lists.newArrayList(); + for (int i = 1; i <= loop.numVertices(); ++i) { + vertices.add(loop.vertex(i)); + } + return new S2Loop(vertices); + } + + public void testContains() { + assertTrue(candyCane.contains(S2LatLng.fromDegrees(5, 71).toPoint())); + for (int i = 0; i < 4; ++i) { + assertTrue(northHemi.contains(new S2Point(0, 0, 1))); + assertTrue(!northHemi.contains(new S2Point(0, 0, -1))); + assertTrue(!southHemi.contains(new S2Point(0, 0, 1))); + assertTrue(southHemi.contains(new S2Point(0, 0, -1))); + assertTrue(!westHemi.contains(new S2Point(0, 1, 0))); + assertTrue(westHemi.contains(new S2Point(0, -1, 0))); + assertTrue(eastHemi.contains(new S2Point(0, 1, 0))); + assertTrue(!eastHemi.contains(new S2Point(0, -1, 0))); + northHemi = rotate(northHemi); + southHemi = rotate(southHemi); + eastHemi = 
rotate(eastHemi); + westHemi = rotate(westHemi); + } + + // This code checks each cell vertex is contained by exactly one of + // the adjacent cells. + for (int level = 0; level < 3; ++level) { + List<S2Loop> loops = Lists.newArrayList(); + List<S2Point> loopVertices = Lists.newArrayList(); + Set<S2Point> points = Sets.newHashSet(); + for (S2CellId id = S2CellId.begin(level); !id.equals(S2CellId.end(level)); id = id.next()) { + S2Cell cell = new S2Cell(id); + points.add(cell.getCenter()); + for (int k = 0; k < 4; ++k) { + loopVertices.add(cell.getVertex(k)); + points.add(cell.getVertex(k)); + } + loops.add(new S2Loop(loopVertices)); + loopVertices.clear(); + } + for (S2Point point : points) { + int count = 0; + for (int j = 0; j < loops.size(); ++j) { + if (loops.get(j).contains(point)) { + ++count; + } + } + assertEquals(count, 1); + } + } + } + + private S2CellId advance(S2CellId id, int n) { + while (id.isValid() && --n >= 0) { + id = id.next(); + } + return id; + } + + private S2Loop makeCellLoop(S2CellId begin, S2CellId end) { + // Construct a CCW polygon whose boundary is the union of the cell ids + // in the range [begin, end). We add the edges one by one, removing + // any edges that are already present in the opposite direction. + + Map<S2Point, Set<S2Point>> edges = Maps.newHashMap(); + for (S2CellId id = begin; !id.equals(end); id = id.next()) { + S2Cell cell = new S2Cell(id); + for (int k = 0; k < 4; ++k) { + S2Point a = cell.getVertex(k); + S2Point b = cell.getVertex((k + 1) & 3); + if (edges.get(b) == null) { + edges.put(b, Sets.<S2Point>newHashSet()); + } + // if a is in b's set, remove it and remove b's set if it's empty + // otherwise, add b to a's set + if (!edges.get(b).remove(a)) { + if (edges.get(a) == null) { + edges.put(a, Sets.<S2Point>newHashSet()); + } + edges.get(a).add(b); + } else if (edges.get(b).isEmpty()) { + edges.remove(b); + } + } + } + + // The remaining edges form a single loop. 
We simply follow it starting + // at an arbitrary vertex and build up a list of vertices. + + List<S2Point> vertices = Lists.newArrayList(); + S2Point p = edges.keySet().iterator().next(); + while (!edges.isEmpty()) { + assertEquals(1, edges.get(p).size()); + S2Point next = edges.get(p).iterator().next(); + vertices.add(p); + edges.remove(p); + p = next; + } + return new S2Loop(vertices); + } + + private void assertRelation( + S2Loop a, S2Loop b, int containsOrCrosses, boolean intersects, boolean nestable) { + assertEquals(a.contains(b), containsOrCrosses == 1); + assertEquals(a.intersects(b), intersects); + if (nestable) { + assertEquals(a.containsNested(b), a.contains(b)); + } + if (containsOrCrosses >= -1) { + assertEquals(a.containsOrCrosses(b), containsOrCrosses); + } + } + + public void testLoopRelations() { + assertRelation(northHemi, northHemi, 1, true, false); + assertRelation(northHemi, southHemi, 0, false, false); + assertRelation(northHemi, eastHemi, -1, true, false); + assertRelation(northHemi, arctic80, 1, true, true); + assertRelation(northHemi, antarctic80, 0, false, true); + assertRelation(northHemi, candyCane, -1, true, false); + + // We can't compare northHemi3 vs. northHemi or southHemi. 
+ assertRelation(northHemi3, northHemi3, 1, true, false); + assertRelation(northHemi3, eastHemi, -1, true, false); + assertRelation(northHemi3, arctic80, 1, true, true); + assertRelation(northHemi3, antarctic80, 0, false, true); + assertRelation(northHemi3, candyCane, -1, true, false); + + assertRelation(southHemi, northHemi, 0, false, false); + assertRelation(southHemi, southHemi, 1, true, false); + assertRelation(southHemi, farHemi, -1, true, false); + assertRelation(southHemi, arctic80, 0, false, true); + assertRelation(southHemi, antarctic80, 1, true, true); + assertRelation(southHemi, candyCane, -1, true, false); + + assertRelation(candyCane, northHemi, -1, true, false); + assertRelation(candyCane, southHemi, -1, true, false); + assertRelation(candyCane, arctic80, 0, false, true); + assertRelation(candyCane, antarctic80, 0, false, true); + assertRelation(candyCane, candyCane, 1, true, false); + + assertRelation(nearHemi, westHemi, -1, true, false); + + assertRelation(smallNeCw, southHemi, 1, true, false); + assertRelation(smallNeCw, westHemi, 1, true, false); + assertRelation(smallNeCw, northHemi, -2, true, false); + assertRelation(smallNeCw, eastHemi, -2, true, false); + + assertRelation(loopA, loopA, 1, true, false); + assertRelation(loopA, loopB, -1, true, false); + assertRelation(loopA, aIntersectB, 1, true, false); + assertRelation(loopA, aUnionB, 0, true, false); + assertRelation(loopA, aMinusB, 1, true, false); + assertRelation(loopA, bMinusA, 0, false, false); + + assertRelation(loopB, loopA, -1, true, false); + assertRelation(loopB, loopB, 1, true, false); + assertRelation(loopB, aIntersectB, 1, true, false); + assertRelation(loopB, aUnionB, 0, true, false); + assertRelation(loopB, aMinusB, 0, false, false); + assertRelation(loopB, bMinusA, 1, true, false); + + assertRelation(aIntersectB, loopA, 0, true, false); + assertRelation(aIntersectB, loopB, 0, true, false); + assertRelation(aIntersectB, aIntersectB, 1, true, false); + 
assertRelation(aIntersectB, aUnionB, 0, true, true); + assertRelation(aIntersectB, aMinusB, 0, false, false); + assertRelation(aIntersectB, bMinusA, 0, false, false); + + assertRelation(aUnionB, loopA, 1, true, false); + assertRelation(aUnionB, loopB, 1, true, false); + assertRelation(aUnionB, aIntersectB, 1, true, true); + assertRelation(aUnionB, aUnionB, 1, true, false); + assertRelation(aUnionB, aMinusB, 1, true, false); + assertRelation(aUnionB, bMinusA, 1, true, false); + + assertRelation(aMinusB, loopA, 0, true, false); + assertRelation(aMinusB, loopB, 0, false, false); + assertRelation(aMinusB, aIntersectB, 0, false, false); + assertRelation(aMinusB, aUnionB, 0, true, false); + assertRelation(aMinusB, aMinusB, 1, true, false); + assertRelation(aMinusB, bMinusA, 0, false, true); + + assertRelation(bMinusA, loopA, 0, false, false); + assertRelation(bMinusA, loopB, 0, true, false); + assertRelation(bMinusA, aIntersectB, 0, false, false); + assertRelation(bMinusA, aUnionB, 0, true, false); + assertRelation(bMinusA, aMinusB, 0, false, true); + assertRelation(bMinusA, bMinusA, 1, true, false); + } + + /** + * TODO(user, ericv) Fix this test. It fails sporadically. + * <p> + * The problem is not in this test, it is that + * {@link S2#robustCCW(S2Point, S2Point, S2Point)} currently requires + * arbitrary-precision arithmetic to be truly robust. That means it can give + * the wrong answers in cases where we are trying to determine edge + * intersections. + * <p> + * It seems the strictfp modifier here in java (required for correctness in + * other areas of the library) restricts the size of temporary registers, + * causing us to lose some of the precision that the C++ version gets. + * <p> + * This test fails when it randomly chooses a cell loop with nearly colinear + * edges. That's where S2.robustCCW provides the wrong answer. Note that there + * is an attempted workaround in {@link S2Loop#isValid(List)}, but it + * does not cover all cases. 
+ */ + public void suppressedTestLoopRelations2() { + // Construct polygons consisting of a sequence of adjacent cell ids + // at some fixed level. Comparing two polygons at the same level + // ensures that there are no T-vertices. + for (int iter = 0; iter < 1000; ++iter) { + long num = rand.nextLong(); + S2CellId begin = new S2CellId(num | 1); + if (!begin.isValid()) { + continue; + } + begin = begin.parent((int) Math.round(rand.nextDouble() * S2CellId.MAX_LEVEL)); + S2CellId aBegin = advance(begin, skewed(6)); + S2CellId aEnd = advance(aBegin, skewed(6) + 1); + S2CellId bBegin = advance(begin, skewed(6)); + S2CellId bEnd = advance(bBegin, skewed(6) + 1); + if (!aEnd.isValid() || !bEnd.isValid()) { + continue; + } + + S2Loop a = makeCellLoop(aBegin, aEnd); + S2Loop b = makeCellLoop(bBegin, bEnd); + boolean contained = (aBegin.lessOrEquals(bBegin) && bEnd.lessOrEquals(aEnd)); + boolean intersects = (aBegin.lessThan(bEnd) && bBegin.lessThan(aEnd)); + log.info( + "Checking " + a.numVertices() + " vs. " + b.numVertices() + ", contained = " + contained + + ", intersects = " + intersects); + + assertEquals(contained, a.contains(b)); + assertEquals(intersects, a.intersects(b)); + } + } + + /** + * Tests that nearly colinear points pass S2Loop.isValid() + */ + public void testRoundingError() { + S2Point a = new S2Point(-0.9190364081111774, 0.17231932652084575, 0.35451111445694833); + S2Point b = new S2Point(-0.92130667053206, 0.17274500072476123, 0.3483578383756171); + S2Point c = new S2Point(-0.9257244057938284, 0.17357332608634282, 0.3360158106235289); + S2Point d = new S2Point(-0.9278712595449962, 0.17397586116468677, 0.32982923679138537); + + assertTrue(S2Loop.isValid(Lists.newArrayList(a, b, c, d))); + } + + /** + * Returns true if the loop points satisfy {@link S2Loop#isValid(List)}. 
+ */ + private boolean isValid(S2Loop loop) { + List<S2Point> vertices = Lists.newArrayList(); + for (int i = 0; i < loop.numVertices(); ++i) { + vertices.add(loop.vertex(i)); + } + return S2Loop.isValid(vertices); + } + + /** + * Tests {@link S2Loop#isValid(List)}. + */ + public void testIsValid() { + assertTrue(isValid(loopA)); + assertTrue(isValid(loopB)); + assertFalse(isValid(bowtie)); + } + + /** + * Tests {@link S2Loop#compareTo(S2Loop)}. + */ + public void testComparisons() { + S2Loop abc = makeLoop("0:1, 0:2, 1:2"); + S2Loop abcd = makeLoop("0:1, 0:2, 1:2, 1:1"); + S2Loop abcde = makeLoop("0:1, 0:2, 1:2, 1:1, 1:0"); + assertTrue(abc.compareTo(abcd) < 0); + assertTrue(abc.compareTo(abcde) < 0); + assertTrue(abcd.compareTo(abcde) < 0); + assertTrue(abcd.compareTo(abc) > 0); + assertTrue(abcde.compareTo(abc) > 0); + assertTrue(abcde.compareTo(abcd) > 0); + + S2Loop bcda = makeLoop("0:2, 1:2, 1:1, 0:1"); + assertEquals(0, abcd.compareTo(bcda)); + assertEquals(0, bcda.compareTo(abcd)); + + S2Loop wxyz = makeLoop("10:11, 10:12, 11:12, 11:11"); + assertTrue(abcd.compareTo(wxyz) > 0); + assertTrue(wxyz.compareTo(abcd) < 0); + } + + public void testGetDistance() { + // Error margin since we're doing numerical computations + double epsilon = 1e-15; + + // A square with (lat,lng) vertices (0,1), (1,1), (1,2) and (0,2) + // Tests the case where the shortest distance is along a normal to an edge, + // onto a vertex + S2Loop s1 = makeLoop("0:1, 1:1, 1:2, 0:2"); + + // A square with (lat,lng) vertices (-1,1), (1,1), (1,2) and (-1,2) + // Tests the case where the shortest distance is along a normal to an edge, + // not onto a vertex + S2Loop s2 = makeLoop("-1:1, 1:1, 1:2, -1:2"); + + // A diamond with (lat,lng) vertices (1,0), (2,1), (3,0) and (2,-1) + // Test the case where the shortest distance is NOT along a normal to an + // edge + S2Loop s3 = makeLoop("1:0, 2:1, 3:0, 2:-1"); + + // All the vertices should be distance 0 + for (int i = 0; i < s1.numVertices(); i++) { + 
assertEquals(0d, s1.getDistance(s1.vertex(i)).radians(), epsilon); + } + + // A point on one of the edges should be distance 0 + assertEquals(0d, s1.getDistance(S2LatLng.fromDegrees(0.5, 1).toPoint()).radians(), epsilon); + + // In all three cases, the closest point to the origin is (0,1), which is at + // a distance of 1 degree. + // Note: all of these are intentionally distances measured along the + // equator, since that makes the math significantly simpler. Otherwise, the + // distance wouldn't actually be 1 degree. + S2Point origin = S2LatLng.fromDegrees(0, 0).toPoint(); + assertEquals(1d, s1.getDistance(origin).degrees(), epsilon); + assertEquals(1d, s2.getDistance(origin).degrees(), epsilon); + assertEquals(1d, s3.getDistance(origin).degrees(), epsilon); + } + + /** + * This function is useful for debugging. + */ + @SuppressWarnings("unused") + private void dumpCrossings(S2Loop loop) { + + System.out.println("Ortho(v1): " + S2.ortho(loop.vertex(1))); + System.out.printf("Contains(kOrigin): %b\n", loop.contains(S2.origin())); + for (int i = 1; i <= loop.numVertices(); ++i) { + S2Point a = S2.ortho(loop.vertex(i)); + S2Point b = loop.vertex(i - 1); + S2Point c = loop.vertex(i + 1); + S2Point o = loop.vertex(i); + System.out.printf("Vertex %d: [%.17g, %.17g, %.17g], " + + "%d%dR=%d, %d%d%d=%d, R%d%d=%d, inside: %b\n", + i, + loop.vertex(i).x, + loop.vertex(i).y, + loop.vertex(i).z, + i - 1, + i, + S2.robustCCW(b, o, a), + i + 1, + i, + i - 1, + S2.robustCCW(c, o, b), + i, + i + 1, + S2.robustCCW(a, o, c), + S2.orderedCCW(a, b, c, o)); + } + for (int i = 0; i < loop.numVertices() + 2; ++i) { + S2Point orig = S2.origin(); + S2Point dest; + if (i < loop.numVertices()) { + dest = loop.vertex(i); + System.out.printf("Origin->%d crosses:", i); + } else { + dest = new S2Point(0, 0, 1); + if (i == loop.numVertices() + 1) { + orig = loop.vertex(1); + } + System.out.printf("Case %d:", i); + } + for (int j = 0; j < loop.numVertices(); ++j) { + System.out.println( + " " + 
S2EdgeUtil.edgeOrVertexCrossing(orig, dest, loop.vertex(j), loop.vertex(j + 1))); + } + System.out.println(); + } + for (int i = 0; i <= 2; i += 2) { + System.out.printf("Origin->v1 crossing v%d->v1: ", i); + S2Point a = S2.ortho(loop.vertex(1)); + S2Point b = loop.vertex(i); + S2Point c = S2.origin(); + S2Point o = loop.vertex(1); + System.out.printf("%d1R=%d, M1%d=%d, R1M=%d, crosses: %b\n", + i, + S2.robustCCW(b, o, a), + i, + S2.robustCCW(c, o, b), + S2.robustCCW(a, o, c), + S2EdgeUtil.edgeOrVertexCrossing(c, o, b, a)); + } + } +} diff --git a/tests/com/google/common/geometry/S2PolygonBuilderTest.java b/tests/com/google/common/geometry/S2PolygonBuilderTest.java new file mode 100644 index 0000000..9837062 --- /dev/null +++ b/tests/com/google/common/geometry/S2PolygonBuilderTest.java @@ -0,0 +1,449 @@ +/* + * Copyright 2006 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.common.geometry; + +import com.google.common.collect.Lists; + +import java.util.List; +import java.util.logging.Logger; + +/** + * Tests for {@link S2Loop}. + * + */ +public strictfp class S2PolygonBuilderTest extends GeometryTestCase { + private static final Logger log = Logger.getLogger(S2PolygonBuilderTest.class.getCanonicalName()); + + // A chain represents either a polyline or a loop, depending + // on whether "closed" is true. 
+ private class Chain { + String str; + boolean closed; + + public Chain(String str, boolean closed) { + this.str = str; + this.closed = closed; + } + } + + private class TestCase { + // +1 = undirected, -1 = directed, 0 = either one + int undirectedEdges; + + // +1 = XOR, -1 = don't XOR, 0 = either one + int xorEdges; + + // Minimum and maximum merge distances for this test case in degrees. + double minMerge; + double maxMerge; + + // Each test case consists of a set of input loops and polylines. + Chain[] chainsIn; + + // The expected set of output loops, directed appropriately. + String[] loopsOut; + + // The expected number of unused edges. + int numUnusedEdges; + + public TestCase(int undirectedEdges, + int xorEdges, + double minMerge, + double maxMerge, + Chain[] chainsIn, + String[] loopsOut, + int numUnusedEdges) { + this.undirectedEdges = undirectedEdges; + this.xorEdges = xorEdges; + this.minMerge = minMerge; + this.maxMerge = maxMerge; + this.chainsIn = chainsIn; + this.loopsOut = loopsOut; + this.numUnusedEdges = numUnusedEdges; + } + } + + TestCase[] testCases = new TestCase[] { + // 0: No loops. + new TestCase(0, 0, 0.0, 10.0, new Chain[] {new Chain(null, false)}, new String[] {}, 0), + + // 1: One loop with some extra edges. + new TestCase(0, + 0, + 0.0, + 4.0, + new Chain[] {new Chain("0:0, 0:10, 10:5", true), new Chain("0:0, 5:5", false), + new Chain("10:5, 20:7, 30:10, 40:15, 50:3, 60:-20", false)}, + new String[] {"0:0, 0:10, 10:5"}, + 6), + + // 2: One loop that has an edge removed by XORing, plus lots of + // extra edges. 
+ new TestCase(0, 1, 0.0, 1.0, // XOR + new Chain[] {new Chain("0:0, 0:10, 5:15, 10:10, 10:0", true), + new Chain("10:10, 12:12, 14:14, 16:16, 18:18", false), + new Chain("14:14, 14:16, 14:18, 14:20", false), + new Chain("14:18, 16:20, 18:22", false), + new Chain("18:12, 16:12, 14:12, 12:12", false), + new Chain("20:18, 18:16, 16:14, 14:12", false), + new Chain("20:14, 18:14, 16:14", false), + new Chain("5:15, 0:10", false)}, + new String[] {}, + 21), + + // 3: Three loops (two shells and one hole) that combine into one. + new TestCase(0, 1, 0.0, 4.0, // XOR + new Chain[] {new Chain("0:0, 0:10, 5:10, 10:10, 10:5, 10:0", true), + new Chain("0:10, 0:15, 5:15, 5:10", true), + new Chain("10:10, 5:10, 5:5, 10:5", true), }, + new String[] {"0:0, 0:10, 0:15, 5:15, 5:10, 5:5, 10:5, 10:0"}, + 0), + + // 4: A big CCW triangle contained 3 CW triangular holes. The whole thing + // looks like a pyramid of nine small triangles (with two extra edges). + new TestCase(-1, 0, 0.0, 0.9, // Directed edges required for unique result. + new Chain[] {new Chain("0:0, 0:2, 0:4, 0:6, 1:5, 2:4, 3:3, 2:2, 1:1", true), + new Chain("0:2, 1:1, 1:3", true), + new Chain("0:4, 1:3, 1:5", true), + new Chain("1:3, 2:2, 2:4", true), + new Chain("0:0, 0:1", false), + new Chain("1:3, 5:7", false)}, + new String[] {"0:0, 0:2, 1:1", + "0:2, 0:4, 1:3", + "0:4, 0:6, 1:5", + "1:1, 1:3, 2:2", + "1:3, 1:5, 2:4", + "2:2, 2:4, 3:3"}, + 2), + + // 5: A square divided into four subsquares. In this case we want + // to extract the four loops rather than taking their union. + // There are four extra edges as well. 
+ new TestCase(0, -1, 0.0, 4.0, // Don't XOR + new Chain[] {new Chain("0:0, 0:5, 5:5, 5:0", true), + new Chain("0:5, 0:10, 5:10, 5:5", true), + new Chain("5:0, 5:5, 10:5, 10:0", true), + new Chain("5:5, 5:10, 10:10, 10:5", true), + new Chain("0:10, 0:15, 0:20", false), + new Chain("20:0, 15:0, 10:0", false)}, + new String[] {"0:0, 0:5, 5:5, 5:0", "0:5, 0:10, 5:10, 5:5", "5:0, 5:5, 10:5, 10:0", + "5:5, 5:10, 10:10, 10:5"}, + 4), + + // 6: Five nested loops that touch at a point. + new TestCase(0, + 0, + 0.0, + 0.8, + new Chain[] {new Chain("0:0, 0:10, 10:10, 10:0", true), + new Chain("0:0, 1:9, 9:9, 9:1", true), new Chain("0:0, 2:8, 8:8, 8:2", true), + new Chain("0:0, 3:7, 7:7, 7:3", true), new Chain("0:0, 4:6, 6:6, 6:4", true)}, + new String[] {"0:0, 0:10, 10:10, 10:0", "0:0, 1:9, 9:9, 9:1", "0:0, 2:8, 8:8, 8:2", + "0:0, 3:7, 7:7, 7:3", "0:0, 4:6, 6:6, 6:4"}, + 0), + + + // 7: Four diamonds nested within each other touching at two points. + new TestCase(-1, 0, 0.0, 4.0, // Directed edges required for unique result. + new Chain[] {new Chain("0:-20, -10:0, 0:20, 10:0", true), + new Chain("0:10, -10:0, 0:-10, 10:0", true), + new Chain("0:-10, -5:0, 0:10, 5:0", true), new Chain("0:5, -5:0, 0:-5, 5:0", true)}, + new String[] {"0:-20, -10:0, 0:-10, 10:0", "0:-10, -5:0, 0:-5, 5:0", + "0:5, -5:0, 0:10, 5:0", "0:10, -10:0, 0:20, 10:0"}, + 0), + + // 8: Seven diamonds nested within each other touching at one + // point between each nested pair. 
+ new TestCase(0, + 0, + 0.0, + 9.0, + new Chain[] {new Chain("0:-70, -70:0, 0:70, 70:0", true), + new Chain("0:-70, -60:0, 0:60, 60:0", true), + new Chain("0:-50, -60:0, 0:50, 50:0", true), + new Chain("0:-40, -40:0, 0:50, 40:0", true), + new Chain("0:-30, -30:0, 0:30, 40:0", true), + new Chain("0:-20, -20:0, 0:30, 20:0", true), + new Chain("0:-10, -20:0, 0:10, 10:0", true)}, + new String[] {"0:-70, -70:0, 0:70, 70:0", + "0:-70, -60:0, 0:60, 60:0", + "0:-50, -60:0, 0:50, 50:0", + "0:-40, -40:0, 0:50, 40:0", + "0:-30, -30:0, 0:30, 40:0", + "0:-20, -20:0, 0:30, 20:0", + "0:-10, -20:0, 0:10, 10:0"}, + 0), + + // 9: A triangle and a self-intersecting bowtie. + new TestCase(0, + 0, + 0.0, + 4.0, + new Chain[] {new Chain("0:0, 0:10, 5:5", true), new Chain("0:20, 0:30, 10:20", false), + new Chain("10:20, 10:30, 0:20", false)}, + new String[] {"0:0, 0:10, 5:5"}, + 4), + + // 10: Two triangles that intersect each other. + new TestCase(0, + 0, + 0.0, + 2.0, + new Chain[] {new Chain("0:0, 0:10, 5:5", true), new Chain("2:2, 2:12, 7:7", true)}, + new String[] {}, + 6), + + // 11: Four squares that combine to make a big square. The nominal + // edges of the square are at +/-8.5 degrees in latitude and longitude. + // All vertices except the center vertex are perturbed by up to 0.5 + // degrees in latitude and/or longitude. The various copies of the + // center vertex are misaligned by more than this (i.e. they are + // structured as a tree where adjacent vertices are separated by at + // most 1 degree in latitude and/or longitude) so that the clustering + // algorithm needs more than one iteration to find them all. Note that + // the merged position of this vertex doesn't matter because it is XORed + // away in the output. + new TestCase(0, 1, 1.5, 5.8, // XOR, min_merge > sqrt(2), max_merge < 6. 
+ new Chain[] {new Chain("-8:-8, -8:0", false), + new Chain("-8:1, -8:8", false), + new Chain("0:-9, -2:0", false), + new Chain("-1:1, 1:9", false), + new Chain("0:8, 2:2", false), + new Chain("0:-2, 1:-8", false), + new Chain("8:9, 9:1", false), + new Chain("9:0, 8:-9", false), + new Chain("9:-9, 0:-8", false), + new Chain("1:-9, -9:-9", false), + new Chain("8:0, 1:0", false), + new Chain("1:2, -8:0", false), + new Chain("-8:1, 1:-1", false), + new Chain("0:1, 8:1", false), + new Chain("-9:8, 1:8", false), + new Chain("0:9, 8:8", false)}, + new String[] {"8.5:8.5, 8.5:0.5, 8.5:-8.5, 0.5:-8.5, " + + "-8.5:-8.5, -8.5:0.5, -8.5:8.5, 0.5:8.5"}, + 0)}; + + @Override + public void setUp() { + super.setUp(); + S2Loop.debugMode = true; + S2Polygon.DEBUG = true; + S2Polyline.debugMode = true; + } + + private void getVertices(String str, + S2Point x, + S2Point y, + S2Point z, + double maxPerturbation, + List<S2Point> vertices) { + + // Parse the vertices, perturb them if desired, and transform them into the + // given frame. + S2Polyline line = makePolyline(str); + + for (int i = 0; i < line.numVertices(); ++i) { + S2Point p = line.vertex(i); + // (p[0]*x + p[1]*y + p[2]*z).Normalize() + S2Point axis = S2Point.normalize( + S2Point.add(S2Point.add(S2Point.mul(x, p.x), S2Point.mul(y, p.y)), S2Point.mul(z, p.z))); + S2Cap cap = S2Cap.fromAxisAngle(axis, S1Angle.radians(maxPerturbation)); + vertices.add(samplePoint(cap)); + } + } + + private boolean loopsEqual(S2Loop a, S2Loop b, double maxError) { + // Return true if two loops have the same cyclic vertex sequence. 
+ + if (a.numVertices() != b.numVertices()) { + return false; + } + for (int offset = 0; offset < a.numVertices(); ++offset) { + if (S2.approxEquals(a.vertex(offset), b.vertex(0), maxError)) { + boolean success = true; + for (int i = 0; i < a.numVertices(); ++i) { + if (!S2.approxEquals(a.vertex(i + offset), b.vertex(i), maxError)) { + success = false; + break; + } + } + if (success) { + return true; + } + // Otherwise continue looping. There may be more than one candidate + // starting offset since vertices are only matched approximately. + } + } + return false; + } + + private boolean findLoop(S2Loop loop, List<S2Loop> candidates, double maxError) { + for (int i = 0; i < candidates.size(); ++i) { + if (loopsEqual(loop, candidates.get(i), maxError)) { + return true; + } + } + return false; + } + + boolean findMissingLoops( + List<S2Loop> actual, List<S2Loop> expected, double maxError, String label) { + // Dump any loops from "actual" that are not present in "expected". + boolean found = false; + for (int i = 0; i < actual.size(); ++i) { + if (findLoop(actual.get(i), expected, maxError)) { + continue; + } + System.err.print(label + " loop " + i + ":\n"); + S2Loop loop = actual.get(i); + for (int j = 0; j < loop.numVertices(); ++j) { + S2Point p = loop.vertex(j); + System.err.print(" [" + p.x + ", " + p.y + ", " + p.z + "]\n"); + } + found = true; + } + return found; + } + + void addChain(Chain chain, + S2Point x, + S2Point y, + S2Point z, + double maxPerturbation, + S2PolygonBuilder builder) { + + // Transform the given edge chain to the frame (x,y,z), perturb each vertex + // up to the given distance, and add it to the builder. 
+ + List<S2Point> vertices = Lists.newArrayList(); + getVertices(chain.str, x, y, z, maxPerturbation, vertices); + if (chain.closed) { + vertices.add(vertices.get(0)); + } + for (int i = 1; i < vertices.size(); ++i) { + builder.addEdge(vertices.get(i - 1), vertices.get(i)); + } + } + + boolean evalTristate(int state) { + return (state > 0) ? true : (state < 0) ? false : (rand.nextDouble() > 0.5); + } + + boolean testBuilder(TestCase test) { + for (int iter = 0; iter < 200; ++iter) { + // Initialize to the default options, which are changed below + S2PolygonBuilder.Options options = S2PolygonBuilder.Options.DIRECTED_XOR; + + options.setUndirectedEdges(evalTristate(test.undirectedEdges)); + options.setXorEdges(evalTristate(test.xorEdges)); + + // Each test has a minimum and a maximum merge distance. The merge + // distance must be at least the given minimum to ensure that all expected + // merging will take place, and it must be at most the given maximum to + // ensure that no unexpected merging takes place. + // + // If the minimum and maximum values are different, we have some latitude + // to perturb the vertices as long as the merge distance is adjusted + // appropriately. If "p" is the maximum perturbation distance, "min" and + // "max" are the min/max merge distances, and "m" is the actual merge + // distance for this test, we require that + // + // x >= min + 2*p and x <= max - 2*p . + // + // This implies that p <= 0.25 * (max - min). We choose "p" so that it is + // zero half of the time, and otherwise chosen randomly up to this limit. + + double minMerge = S1Angle.degrees(test.minMerge).radians(); + double maxMerge = S1Angle.degrees(test.maxMerge).radians(); + double r = Math.max(0.0, 2 * rand.nextDouble() - 1); + double maxPerturbation = r * 0.25 * (maxMerge - minMerge); + + // Now we set the merge distance chosen randomly within the limits above + // (min + 2*p and max - 2*p). Half of the time we set the merge distance + // to the minimum value. 
+ + r = Math.max(0.0, 2 * rand.nextDouble() - 1); + options.setMergeDistance(S1Angle.radians( + minMerge + 2 * maxPerturbation + r * (maxMerge - minMerge - 4 * maxPerturbation))); + + options.setValidate(true); + S2PolygonBuilder builder = new S2PolygonBuilder(options); + + // On each iteration we randomly rotate the test case around the sphere. + // This causes the S2PolygonBuilder to choose different first edges when + // trying to build loops. + S2Point x = randomPoint(); + S2Point y = S2Point.normalize(S2Point.crossProd(x, randomPoint())); + S2Point z = S2Point.normalize(S2Point.crossProd(x, y)); + + for (Chain chain : test.chainsIn) { + addChain(chain, x, y, z, maxPerturbation, builder); + } + List<S2Loop> loops = Lists.newArrayList(); + List<S2Edge> unusedEdges = Lists.newArrayList(); + if (test.xorEdges < 0) { + builder.assembleLoops(loops, unusedEdges); + } else { + S2Polygon polygon = new S2Polygon(); + builder.assemblePolygon(polygon, unusedEdges); + polygon.release(loops); + } + List<S2Loop> expected = Lists.newArrayList(); + for (String loop : test.loopsOut) { + List<S2Point> vertices = Lists.newArrayList(); + getVertices(loop, x, y, z, 0, vertices); + expected.add(new S2Loop(vertices)); + } + // We assume that the vertex locations in the expected output polygon + // are separated from the corresponding vertex locations in the input + // edges by at most half of the minimum merge distance. Essentially + // this means that the expected output vertices should be near the + // centroid of the various input vertices. + double maxError = 0.5 * minMerge + maxPerturbation; + + // Note single "|" below so that we print both sets of loops. 
+ if (findMissingLoops(loops, expected, maxError, "Actual")
+ | findMissingLoops(expected, loops, maxError, "Expected")) {
+ System.err.print(
+ "During iteration " + iter + ", undirected: " + options.getUndirectedEdges() + ", xor: "
+ + options.getXorEdges() + "\n\n");
+ return false;
+ }
+ if (unusedEdges.size() != test.numUnusedEdges) {
+ System.err.print("Wrong number of unused edges: " + unusedEdges.size() + " (should be "
+ + test.numUnusedEdges + ")\n");
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public void testAssembleLoops() {
+ boolean success = true;
+ for (int i = 0; i < testCases.length; ++i) {
+ log.info("Starting test case " + i);
+
+ boolean caseSuccess = testBuilder(testCases[i]);
+
+ log.info("Test case " + i + " finished: " + ((caseSuccess) ? "SUCCESS" : "FAILED"));
+
+ success &= caseSuccess;
+ }
+ assertTrue(success);
+ }
+}
diff --git a/tests/com/google/common/geometry/S2PolygonTest.java b/tests/com/google/common/geometry/S2PolygonTest.java
new file mode 100644
index 0000000..89a94ff
--- /dev/null
+++ b/tests/com/google/common/geometry/S2PolygonTest.java
@@ -0,0 +1,345 @@
+/*
+ * Copyright 2006 Google Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.common.geometry;
+
+import com.google.common.collect.Lists;
+
+import java.util.List;
+
+/**
+ * Tests for {@link S2Polygon}.
+ * + */ +public strictfp class S2PolygonTest extends GeometryTestCase { + + // A set of nested loops around the point 0:0 (lat:lng). + // Every vertex of NEAR0 is a vertex of NEAR1. + private static final String NEAR0 = "-1:0, 0:1, 1:0, 0:-1;"; + private static final String NEAR1 = "-1:-1, -1:0, -1:1, 0:1, 1:1, 1:0, 1:-1, 0:-1;"; + private static final String NEAR2 = "5:-2, -2:5, -1:-2;"; + private static final String NEAR3 = "6:-3, -3:6, -2:-2;"; + private static final String NEAR_HEMI = "0:-90, -90:0, 0:90, 90:0;"; + + // A set of nested loops around the point 0:180 (lat:lng). + // Every vertex of FAR0 and FAR2 belongs to FAR1, and all + // the loops except FAR2 are non-convex. + private static final String FAR0 = "0:179, 1:180, 0:-179, 2:-180;"; + private static final String FAR1 = + "0:179, -1:179, 1:180, -1:-179, 0:-179, 3:-178, 2:-180, 3:178;"; + private static final String FAR2 = "-1:-179, -1:179, 3:178, 3:-178;"; // opposite + // direction + private static final String FAR3 = "-3:-178, -2:179, -3:178, 4:177, 4:-177;"; + private static final String FAR_HEMI = "0:-90, 60:90, -60:90;"; + + // A set of nested loops around the point -90:0 (lat:lng). + private static final String SOUTH0a = "-90:0, -89.99:0, -89.99:0.01;"; + private static final String SOUTH0b = "-90:0, -89.99:0.02, -89.99:0.03;"; + private static final String SOUTH0c = "-90:0, -89.99:0.04, -89.99:0.05;"; + private static final String SOUTH1 = "-90:0, -89.9:-0.1, -89.9:0.1;"; + private static final String SOUTH2 = "-90:0, -89.8:-0.2, -89.8:0.2;"; + private static final String SOUTH_HEMI = "0:-180, 0:60, 0:-60;"; + + // Two different loops that surround all the Near and Far loops except + // for the hemispheres. 
+ private static final String NEAR_FAR1 =
+ "-1:-9, -9:-9, -9:9, 9:9, 9:-9, 1:-9, " + "1:-175, 9:-175, 9:175, -9:175, -9:-175, -1:-175;";
+ private static final String NEAR_FAR2 =
+ "-8:-4, 8:-4, 2:15, 2:170, 8:-175, -8:-175, -2:170, -2:15;";
+
+ // Two rectangles that are "adjacent", but rather than having common edges,
+ // those edges are slightly off. A third rectangle that is not adjacent to
+ // either of the first two.
+ private static final String ADJACENT0 = "0:1, 1:1, 2:1, 2:0, 1:0, 0:0;";
+ private static final String ADJACENT1 = "0:2, 1:2, 2:2, 2:1.01, 1:0.99, 0:1.01;";
+ private static final String UN_ADJACENT = "10:10, 11:10, 12:10, 12:9, 11:9, 10:9;";
+
+ // Shapes used to test comparison functions for polygons.
+ private static final String RECTANGLE1 = "0:1, 1:1, 2:1, 2:0, 1:0, 0:0;";
+ private static final String RECTANGLE2 = "5:1, 6:1, 7:1, 7:0, 6:0, 5:0;";
+ private static final String TRIANGLE = "15:0, 17:0, 16:2;";
+ private static final String TRIANGLE_ROT = "17:0, 16:2, 15:0;";
+
+ @Override
+ public void setUp() {
+ super.setUp();
+ S2Loop.debugMode = true;
+ S2Polygon.DEBUG = true;
+ }
+
+ private void assertContains(String aStr, String bStr) {
+ S2Polygon a = makePolygon(aStr);
+ S2Polygon b = makePolygon(bStr);
+ assertTrue(a.contains(b));
+ }
+
+ // Make sure we've set things up correctly.
+ public void testInit() { + assertContains(NEAR1, NEAR0); + assertContains(NEAR2, NEAR1); + assertContains(NEAR3, NEAR2); + assertContains(NEAR_HEMI, NEAR3); + assertContains(FAR1, FAR0); + assertContains(FAR2, FAR1); + assertContains(FAR3, FAR2); + assertContains(FAR_HEMI, FAR3); + assertContains(SOUTH1, SOUTH0a); + assertContains(SOUTH1, SOUTH0b); + assertContains(SOUTH1, SOUTH0c); + assertContains(SOUTH_HEMI, SOUTH2); + assertContains(NEAR_FAR1, NEAR3); + assertContains(NEAR_FAR1, FAR3); + assertContains(NEAR_FAR2, NEAR3); + assertContains(NEAR_FAR2, FAR3); + } + + S2Polygon near10 = makePolygon(NEAR0 + NEAR1); + S2Polygon near30 = makePolygon(NEAR3 + NEAR0); + S2Polygon near32 = makePolygon(NEAR2 + NEAR3); + S2Polygon near3210 = makePolygon(NEAR0 + NEAR2 + NEAR3 + NEAR1); + S2Polygon nearH3210 = makePolygon(NEAR0 + NEAR2 + NEAR3 + NEAR_HEMI + NEAR1); + + S2Polygon far10 = makePolygon(FAR0 + FAR1); + S2Polygon far21 = makePolygon(FAR2 + FAR1); + S2Polygon far321 = makePolygon(FAR2 + FAR3 + FAR1); + S2Polygon farH20 = makePolygon(FAR2 + FAR_HEMI + FAR0); + S2Polygon farH3210 = makePolygon(FAR2 + FAR_HEMI + FAR0 + FAR1 + FAR3); + + S2Polygon south0ab = makePolygon(SOUTH0a + SOUTH0b); + S2Polygon south2 = makePolygon(SOUTH2); + S2Polygon south210b = makePolygon(SOUTH2 + SOUTH0b + SOUTH1); + S2Polygon southH21 = makePolygon(SOUTH2 + SOUTH_HEMI + SOUTH1); + S2Polygon southH20abc = makePolygon(SOUTH2 + SOUTH0b + SOUTH_HEMI + SOUTH0a + SOUTH0c); + + S2Polygon nf1n10f2s10abc = + makePolygon(SOUTH0c + FAR2 + NEAR1 + NEAR_FAR1 + NEAR0 + SOUTH1 + SOUTH0b + SOUTH0a); + + S2Polygon nf2n2f210s210ab = + makePolygon(FAR2 + SOUTH0a + FAR1 + SOUTH1 + FAR0 + SOUTH0b + NEAR_FAR2 + SOUTH2 + NEAR2); + + S2Polygon f32n0 = makePolygon(FAR2 + NEAR0 + FAR3); + S2Polygon n32s0b = makePolygon(NEAR3 + SOUTH0b + NEAR2); + + S2Polygon adj0 = makePolygon(ADJACENT0); + S2Polygon adj1 = makePolygon(ADJACENT1); + S2Polygon unAdj = makePolygon(UN_ADJACENT); + + private void 
assertRelation(S2Polygon a, S2Polygon b, int contains, boolean intersects) { + assertEquals(a.contains(b), contains > 0); + assertEquals(b.contains(a), contains < 0); + assertEquals(a.intersects(b), intersects); + } + + public void testRelations() { + assertRelation(near10, near30, -1, true); + assertRelation(near10, near32, 0, false); + assertRelation(near10, near3210, -1, true); + assertRelation(near10, nearH3210, 0, false); + assertRelation(near30, near32, 1, true); + assertRelation(near30, near3210, 1, true); + assertRelation(near30, nearH3210, 0, true); + assertRelation(near32, near3210, -1, true); + assertRelation(near32, nearH3210, 0, false); + assertRelation(near3210, nearH3210, 0, false); + + assertRelation(far10, far21, 0, false); + assertRelation(far10, far321, -1, true); + assertRelation(far10, farH20, 0, false); + assertRelation(far10, farH3210, 0, false); + assertRelation(far21, far321, 0, false); + assertRelation(far21, farH20, 0, false); + assertRelation(far21, farH3210, -1, true); + assertRelation(far321, farH20, 0, true); + assertRelation(far321, farH3210, 0, true); + assertRelation(farH20, farH3210, 0, true); + + assertRelation(south0ab, south2, -1, true); + assertRelation(south0ab, south210b, 0, true); + assertRelation(south0ab, southH21, -1, true); + assertRelation(south0ab, southH20abc, -1, true); + assertRelation(south2, south210b, 1, true); + assertRelation(south2, southH21, 0, true); + assertRelation(south2, southH20abc, 0, true); + assertRelation(south210b, southH21, 0, true); + assertRelation(south210b, southH20abc, 0, true); + assertRelation(southH21, southH20abc, 1, true); + + assertRelation(nf1n10f2s10abc, nf2n2f210s210ab, 0, true); + assertRelation(nf1n10f2s10abc, near32, 1, true); + assertRelation(nf1n10f2s10abc, far21, 0, false); + assertRelation(nf1n10f2s10abc, south0ab, 0, false); + assertRelation(nf1n10f2s10abc, f32n0, 1, true); + + assertRelation(nf2n2f210s210ab, near10, 0, false); + assertRelation(nf2n2f210s210ab, far10, 1, 
true); + assertRelation(nf2n2f210s210ab, south210b, 1, true); + assertRelation(nf2n2f210s210ab, south0ab, 1, true); + assertRelation(nf2n2f210s210ab, n32s0b, 1, true); + } + + private void assertPointApproximatelyEquals( + S2Loop s2Loop, int vertexIndex, double lat, double lng, double error) { + S2LatLng latLng = new S2LatLng(s2Loop.vertex(vertexIndex)); + assertDoubleNear(latLng.latDegrees(), lat, error); + assertDoubleNear(latLng.lngDegrees(), lng, error); + } + + private void checkEqual(S2Polygon a, S2Polygon b) { + final double MAX_ERROR = 1e-31; + + if (a.isNormalized() && b.isNormalized()) { + boolean r = a.boundaryApproxEquals(b, MAX_ERROR); + assertTrue(r); + } else { + S2PolygonBuilder builder = new S2PolygonBuilder(S2PolygonBuilder.Options.UNDIRECTED_XOR); + S2Polygon a2 = new S2Polygon(); + S2Polygon b2 = new S2Polygon(); + builder.addPolygon(a); + assertTrue(builder.assemblePolygon(a2, null)); + builder.addPolygon(b); + assertTrue(builder.assemblePolygon(b2, null)); + assertTrue(a2.boundaryApproxEquals(b2, MAX_ERROR)); + } + } + + public void tryUnion(S2Polygon a, S2Polygon b) { + S2Polygon union = new S2Polygon(); + union.initToUnion(a, b); + + List<S2Polygon> polygons = Lists.newArrayList(); + polygons.add(new S2Polygon(a)); + polygons.add(new S2Polygon(b)); + S2Polygon destructiveUnion = S2Polygon.destructiveUnion(polygons); + + checkEqual(union, destructiveUnion); + } + + public void testDisjoint() { + S2PolygonBuilder builder = new S2PolygonBuilder(S2PolygonBuilder.Options.UNDIRECTED_XOR); + builder.addPolygon(adj0); + builder.addPolygon(unAdj); + S2Polygon ab = new S2Polygon(); + assertTrue(builder.assemblePolygon(ab, null)); + + S2Polygon union = new S2Polygon(); + union.initToUnion(adj0, unAdj); + assertEquals(2, union.numLoops()); + + checkEqual(ab, union); + tryUnion(adj0, unAdj); + } + + public void testUnionSloppySuccess() { + List<S2Polygon> polygons = Lists.newArrayList(); + polygons.add(adj0); + polygons.add(adj1); + S2Polygon union = 
S2Polygon.destructiveUnionSloppy(polygons, S1Angle.degrees(0.1));
+
+ assertEquals(1, union.numLoops());
+ if (union.numLoops() != 1) {
+ return;
+ }
+ S2Loop s2Loop = union.loop(0);
+ assertEquals(8, s2Loop.numVertices());
+ if (s2Loop.numVertices() != 8) {
+ return;
+ }
+ assertPointApproximatelyEquals(s2Loop, 0, 2.0, 0.0, 0.01);
+ assertPointApproximatelyEquals(s2Loop, 1, 1.0, 0.0, 0.01);
+ assertPointApproximatelyEquals(s2Loop, 2, 0.0, 0.0, 0.01);
+ assertPointApproximatelyEquals(s2Loop, 3, 0.0, 1.0, 0.01);
+ assertPointApproximatelyEquals(s2Loop, 4, 0.0, 2.0, 0.01);
+ assertPointApproximatelyEquals(s2Loop, 5, 1.0, 2.0, 0.01);
+ assertPointApproximatelyEquals(s2Loop, 6, 2.0, 2.0, 0.01);
+ assertPointApproximatelyEquals(s2Loop, 7, 2.0, 1.0, 0.01);
+ }
+
+ public void testUnionSloppyFailure() {
+ List<S2Polygon> polygons = Lists.newArrayList();
+ polygons.add(adj0);
+ polygons.add(unAdj);
+ // The polygons are sufficiently far apart that this angle will not
+ // bring them together:
+ S2Polygon union = S2Polygon.destructiveUnionSloppy(polygons, S1Angle.degrees(0.1));
+
+ assertEquals(2, union.numLoops());
+ }
+
+ public void testCompareTo() {
+ // Polygons with same loops, but in different order:
+ S2Polygon p1 = makePolygon(RECTANGLE1 + RECTANGLE2);
+ S2Polygon p2 = makePolygon(RECTANGLE2 + RECTANGLE1);
+ assertEquals(0, p1.compareTo(p2));
+
+ // Polygons with same loops, but in different order and containing a
+ // different number of points.
+ S2Polygon p3 = makePolygon(RECTANGLE1 + TRIANGLE);
+ S2Polygon p4 = makePolygon(TRIANGLE + RECTANGLE1);
+ assertEquals(0, p3.compareTo(p4));
+
+ // Polygons with same logical loop (but loop is reordered).
+ S2Polygon p5 = makePolygon(TRIANGLE);
+ S2Polygon p6 = makePolygon(TRIANGLE_ROT);
+ assertEquals(0, p5.compareTo(p6));
+
+ // Polygons with a differing number of loops
+ S2Polygon p7 = makePolygon(RECTANGLE1 + RECTANGLE2);
+ S2Polygon p8 = makePolygon(TRIANGLE);
+ assertTrue(0 > p8.compareTo(p7));
+ assertTrue(0 < p7.compareTo(p8));
+
+ // Polygons with a differing number of loops (one a subset of the other)
+ S2Polygon p9 = makePolygon(RECTANGLE1 + RECTANGLE2 + TRIANGLE);
+ S2Polygon p10 = makePolygon(RECTANGLE1 + RECTANGLE2);
+ assertTrue(0 < p9.compareTo(p10));
+ assertTrue(0 > p10.compareTo(p9));
+ }
+
+ public void testGetDistance() {
+ // Error margin since we're doing numerical computations
+ double epsilon = 1e-15;
+
+ // A rectangle with (lat,lng) vertices (3,1), (3,-1), (-3,-1) and (-3,1)
+ String inner = "3:1, 3:-1, -3:-1, -3:1;";
+ // A larger rectangle with (lat,lng) vertices (4,2), (4,-2), (-4,-2) and
+ // (-4,2)
+ String outer = "4:2, 4:-2, -4:-2, -4:2;";
+
+
+ S2Polygon rect = makePolygon(inner);
+ S2Polygon shell = makePolygon(inner + outer);
+
+ // All of the vertices of a polygon should be distance 0
+ for (int i = 0; i < shell.numLoops(); i++) {
+ for (int j = 0; j < shell.loop(i).numVertices(); j++) {
+ assertEquals(0d, shell.getDistance(shell.loop(i).vertex(j)).radians(), epsilon);
+ }
+ }
+
+ // A non-vertex point on an edge should be distance 0
+ assertEquals(0d, rect.getDistance(
+ S2Point.normalize(S2Point.add(rect.loop(0).vertex(0), rect.loop(0).vertex(1)))).radians(),
+ epsilon);
+
+ S2Point origin = S2LatLng.fromDegrees(0, 0).toPoint();
+ // rect contains the origin
+ assertEquals(0d, rect.getDistance(origin).radians(), epsilon);
+
+ // shell does NOT contain the origin, since it has a hole.
The shortest + // distance is to (1,0) or (-1,0), and should be 1 degree + assertEquals(1d, shell.getDistance(origin).degrees(), epsilon); + } +} diff --git a/tests/com/google/common/geometry/S2PolylineTest.java b/tests/com/google/common/geometry/S2PolylineTest.java new file mode 100644 index 0000000..1827d61 --- /dev/null +++ b/tests/com/google/common/geometry/S2PolylineTest.java @@ -0,0 +1,163 @@ +/* + * Copyright 2006 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.common.geometry; + +import com.google.common.collect.Lists; +import com.google.testing.util.MoreAsserts; + +import java.util.List; + +/** + * Tests for {@link S2Polyline}. + * + */ +public strictfp class S2PolylineTest extends GeometryTestCase { + + @Override + public void setUp() { + super.setUp(); + S2Polyline.debugMode = true; + } + + public void testBasic() { + List<S2Point> vertices = Lists.newArrayList(); + S2Polyline empty = new S2Polyline(vertices); + assertEquals(empty.getRectBound(), S2LatLngRect.empty()); + } + + public void testGetLengthCentroid() { + // Construct random great circles and divide them randomly into segments. + // Then make sure that the length and centroid are correct. Note that + // because of the way the centroid is computed, it does not matter how + // we split the great circle into segments. + + for (int i = 0; i < 100; ++i) { + // Choose a coordinate frame for the great circle. 
+ S2Point x = randomPoint(); + S2Point y = S2Point.normalize(S2Point.crossProd(x, randomPoint())); + S2Point z = S2Point.normalize(S2Point.crossProd(x, y)); + + List<S2Point> vertices = Lists.newArrayList(); + for (double theta = 0; theta < 2 * S2.M_PI; theta += Math.pow(rand.nextDouble(), 10)) { + S2Point p = S2Point.add(S2Point.mul(x, Math.cos(theta)), S2Point.mul(y, Math.sin(theta))); + if (vertices.isEmpty() || !p.equals(vertices.get(vertices.size() - 1))) { + vertices.add(p); + } + } + // Close the circle. + vertices.add(vertices.get(0)); + S2Polyline line = new S2Polyline(vertices); + S1Angle length = line.getArclengthAngle(); + assertTrue(Math.abs(length.radians() - 2 * S2.M_PI) < 2e-14); + } + } + + public void testMayIntersect() { + List<S2Point> vertices = Lists.newArrayList(); + vertices.add(S2Point.normalize(new S2Point(1, -1.1, 0.8))); + vertices.add(S2Point.normalize(new S2Point(1, -0.8, 1.1))); + S2Polyline line = new S2Polyline(vertices); + for (int face = 0; face < 6; ++face) { + S2Cell cell = S2Cell.fromFacePosLevel(face, (byte) 0, 0); + assertEquals(line.mayIntersect(cell), (face & 1) == 0); + } + } + + public void testInterpolate() { + List<S2Point> vertices = Lists.newArrayList(); + vertices.add(new S2Point(1, 0, 0)); + vertices.add(new S2Point(0, 1, 0)); + vertices.add(S2Point.normalize(new S2Point(0, 1, 1))); + vertices.add(new S2Point(0, 0, 1)); + S2Polyline line = new S2Polyline(vertices); + + assertEquals(line.interpolate(-0.1), vertices.get(0)); + assertTrue(S2.approxEquals( + line.interpolate(0.1), S2Point.normalize(new S2Point(1, Math.tan(0.2 * S2.M_PI / 2), 0)))); + assertTrue(S2.approxEquals(line.interpolate(0.25), S2Point.normalize(new S2Point(1, 1, 0)))); + + assertEquals(line.interpolate(0.5), vertices.get(1)); + assertEquals(line.interpolate(0.75), vertices.get(2)); + assertEquals(line.interpolate(1.1), vertices.get(3)); + } + + public void testEqualsAndHashCode() { + List<S2Point> vertices = Lists.newArrayList(); + 
vertices.add(new S2Point(1, 0, 0)); + vertices.add(new S2Point(0, 1, 0)); + vertices.add(S2Point.normalize(new S2Point(0, 1, 1))); + vertices.add(new S2Point(0, 0, 1)); + + + S2Polyline line1 = new S2Polyline(vertices); + S2Polyline line2 = new S2Polyline(vertices); + + MoreAsserts.checkEqualsAndHashCodeMethods(line1, line2, true); + + List<S2Point> moreVertices = Lists.newLinkedList(vertices); + moreVertices.remove(0); + + S2Polyline line3 = new S2Polyline(moreVertices); + + MoreAsserts.checkEqualsAndHashCodeMethods(line1, line3, false); + MoreAsserts.checkEqualsAndHashCodeMethods(line1, null, false); + MoreAsserts.checkEqualsAndHashCodeMethods(line1, "", false); + } + + public void testProject() { + List<S2Point> latLngs = Lists.newArrayList(); + latLngs.add(S2LatLng.fromDegrees(0, 0).toPoint()); + latLngs.add(S2LatLng.fromDegrees(0, 1).toPoint()); + latLngs.add(S2LatLng.fromDegrees(0, 2).toPoint()); + latLngs.add(S2LatLng.fromDegrees(1, 2).toPoint()); + S2Polyline line = new S2Polyline(latLngs); + + int edgeIndex = -1; + S2Point testPoint = null; + + testPoint = S2LatLng.fromDegrees(0.5, -0.5).toPoint(); + edgeIndex = line.getNearestEdgeIndex(testPoint); + assertTrue(S2.approxEquals( + line.projectToEdge(testPoint, edgeIndex), S2LatLng.fromDegrees(0, 0).toPoint())); + assertEquals(0, edgeIndex); + + testPoint = S2LatLng.fromDegrees(0.5, 0.5).toPoint(); + edgeIndex = line.getNearestEdgeIndex(testPoint); + assertTrue(S2.approxEquals( + line.projectToEdge(testPoint, edgeIndex), S2LatLng.fromDegrees(0, 0.5).toPoint())); + assertEquals(0, edgeIndex); + + testPoint = S2LatLng.fromDegrees(0.5, 1).toPoint(); + edgeIndex = line.getNearestEdgeIndex(testPoint); + assertTrue(S2.approxEquals( + line.projectToEdge(testPoint, edgeIndex), S2LatLng.fromDegrees(0, 1).toPoint())); + assertEquals(0, edgeIndex); + + testPoint = S2LatLng.fromDegrees(-0.5, 2.5).toPoint(); + edgeIndex = line.getNearestEdgeIndex(testPoint); + assertTrue(S2.approxEquals( + line.projectToEdge(testPoint, 
edgeIndex), S2LatLng.fromDegrees(0, 2).toPoint())); + assertEquals(1, edgeIndex); + + testPoint = S2LatLng.fromDegrees(2, 2).toPoint(); + edgeIndex = line.getNearestEdgeIndex(testPoint); + assertTrue(S2.approxEquals( + line.projectToEdge(testPoint, edgeIndex), S2LatLng.fromDegrees(1, 2).toPoint())); + assertEquals(2, edgeIndex); + } + +} diff --git a/tests/com/google/common/geometry/S2RegionCovererTest.java b/tests/com/google/common/geometry/S2RegionCovererTest.java new file mode 100644 index 0000000..51e3de4 --- /dev/null +++ b/tests/com/google/common/geometry/S2RegionCovererTest.java @@ -0,0 +1,136 @@ +/* + * Copyright 2005 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.common.geometry; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.logging.Logger; + +public strictfp class S2RegionCovererTest extends GeometryTestCase { + private static Logger logger = Logger.getLogger(S2RegionCovererTest.class.getName()); + + public void testRandomCells() { + logger.info("TestRandomCells"); + + S2RegionCoverer coverer = new S2RegionCoverer(); + coverer.setMaxCells(1); + + // Test random cell ids at all levels. 
+ for (int i = 0; i < 10000; ++i) { + S2CellId id = getRandomCellId(); + S2CellUnion covering = new S2CellUnion(); + coverer.getCovering(new S2Cell(id), covering.cellIds()); + assertEquals(covering.size(), 1); + assertEquals(covering.cellId(0), id); + } + } + + public void checkCovering( + S2RegionCoverer coverer, S2Region region, ArrayList<S2CellId> covering, boolean interior) { + + // Keep track of how many cells have the same coverer.min_level() ancestor. + HashMap<S2CellId, Integer> minLevelCells = new HashMap<S2CellId, Integer>(); + for (int i = 0; i < covering.size(); ++i) { + int level = covering.get(i).level(); + assertTrue(level >= coverer.minLevel()); + assertTrue(level <= coverer.maxLevel()); + assertEquals((level - coverer.minLevel()) % coverer.levelMod(), 0); + S2CellId key = covering.get(i).parent(coverer.minLevel()); + if (!minLevelCells.containsKey(key)) { + minLevelCells.put(key, 1); + } else { + minLevelCells.put(key, minLevelCells.get(key) + 1); + } + } + if (covering.size() > coverer.maxCells()) { + // If the covering has more than the requested number of cells, then check + // that the cell count cannot be reduced by using the parent of some cell. 
+ for (Integer i : minLevelCells.values()) { + assertEquals(i.intValue(), 1); + } + } + + if (interior) { + for (int i = 0; i < covering.size(); ++i) { + assertTrue(region.contains(new S2Cell(covering.get(i)))); + } + } else { + S2CellUnion cellUnion = new S2CellUnion(); + cellUnion.initFromCellIds(covering); + checkCovering(region, cellUnion, true, new S2CellId()); + } + } + + public void testRandomCaps() { + logger.info("TestRandomCaps"); + + final int kMaxLevel = S2CellId.MAX_LEVEL; + S2RegionCoverer coverer = new S2RegionCoverer(); + for (int i = 0; i < 1000; ++i) { + do { + coverer.setMinLevel(random(kMaxLevel + 1)); + coverer.setMaxLevel(random(kMaxLevel + 1)); + } while (coverer.minLevel() > coverer.maxLevel()); + coverer.setMaxCells(skewed(10)); + coverer.setLevelMod(1 + random(3)); + double maxArea = Math.min( + 4 * S2.M_PI, (3 * coverer.maxCells() + 1) * S2Cell.averageArea(coverer.minLevel())); + S2Cap cap = getRandomCap(0.1 * S2Cell.averageArea(kMaxLevel), maxArea); + ArrayList<S2CellId> covering = new ArrayList<S2CellId>(); + ArrayList<S2CellId> interior = new ArrayList<S2CellId>(); + + coverer.getCovering(cap, covering); + checkCovering(coverer, cap, covering, false); + + coverer.getInteriorCovering(cap, interior); + checkCovering(coverer, cap, interior, true); + + + // Check that GetCovering is deterministic. + ArrayList<S2CellId> covering2 = new ArrayList<S2CellId>(); + coverer.getCovering(cap, covering2); + assertTrue(covering.equals(covering2)); + + // Also check S2CellUnion.denormalize(). The denormalized covering + // may still be different and smaller than "covering" because + // S2RegionCoverer does not guarantee that it will not output all four + // children of the same parent. 
+ S2CellUnion cells = new S2CellUnion(); + cells.initFromCellIds(covering); + ArrayList<S2CellId> denormalized = new ArrayList<S2CellId>(); + cells.denormalize(coverer.minLevel(), coverer.levelMod(), denormalized); + checkCovering(coverer, cap, denormalized, false); + } + } + + public void testSimpleCoverings() { + logger.info("TestSimpleCoverings"); + + final int kMaxLevel = S2CellId.MAX_LEVEL; + S2RegionCoverer coverer = new S2RegionCoverer(); + coverer.setMaxCells(Integer.MAX_VALUE); + for (int i = 0; i < 1000; ++i) { + int level = random(kMaxLevel + 1); + coverer.setMinLevel(level); + coverer.setMaxLevel(level); + double maxArea = Math.min(4 * S2.M_PI, 1000 * S2Cell.averageArea(level)); + S2Cap cap = getRandomCap(0.1 * S2Cell.averageArea(kMaxLevel), maxArea); + ArrayList<S2CellId> covering = new ArrayList<S2CellId>(); + S2RegionCoverer.getSimpleCovering(cap, cap.axis(), level, covering); + checkCovering(coverer, cap, covering, false); + } + } +} diff --git a/tests/com/google/common/geometry/S2Test.java b/tests/com/google/common/geometry/S2Test.java new file mode 100644 index 0000000..f94f594 --- /dev/null +++ b/tests/com/google/common/geometry/S2Test.java @@ -0,0 +1,305 @@ +/* + * Copyright 2005 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.common.geometry; + +import java.util.logging.Logger; + +public strictfp class S2Test extends GeometryTestCase { + + private static Logger logger = Logger.getLogger(S2Test.class.getName()); + + static int swapAxes(int ij) { + return ((ij >> 1) & 1) + ((ij & 1) << 1); + } + + static int invertBits(int ij) { + return ij ^ 3; + } + + public void testTraversalOrder() { + for (int r = 0; r < 4; ++r) { + for (int i = 0; i < 4; ++i) { + // Check consistency with respect to swapping axes. + assertEquals( + S2Projections.IJ_TO_POS[r][i], S2Projections.IJ_TO_POS[r ^ S2.SWAP_MASK][swapAxes(i)]); + assertEquals(S2.POS_TO_IJ[r][i], swapAxes(S2.POS_TO_IJ[r ^ S2.SWAP_MASK][i])); + + // Check consistency with respect to reversing axis directions. + assertEquals(S2Projections.IJ_TO_POS[r][i], + S2Projections.IJ_TO_POS[r ^ S2.INVERT_MASK][invertBits(i)]); + assertEquals(S2.POS_TO_IJ[r][i], invertBits(S2.POS_TO_IJ[r ^ S2.INVERT_MASK][i])); + + // Check that the two tables are inverses of each other. + assertEquals(S2Projections.IJ_TO_POS[r][S2.POS_TO_IJ[r][i]], i); + assertEquals(S2.POS_TO_IJ[r][S2Projections.IJ_TO_POS[r][i]], i); + } + } + } + + public void testSTUV() { + // Check boundary conditions. + for (double x = -1; x <= 1; ++x) { + assertEquals(S2Projections.stToUV(x), x); + assertEquals(S2Projections.uvToST(x), x); + } + // Check that UVtoST and STtoUV are inverses. + for (double x = -1; x <= 1; x += 0.0001) { + assertDoubleNear(S2Projections.uvToST(S2Projections.stToUV(x)), x); + assertDoubleNear(S2Projections.stToUV(S2Projections.uvToST(x)), x); + } + } + + public void testFaceUVtoXYZ() { + // Check that each face appears exactly once. 
+ S2Point sum = new S2Point(); + for (int face = 0; face < 6; ++face) { + S2Point center = S2Projections.faceUvToXyz(face, 0, 0); + assertEquals(S2Projections.getNorm(face), center); + assertEquals(Math.abs(center.get(center.largestAbsComponent())), 1.0); + sum = S2Point.add(sum, S2Point.fabs(center)); + } + assertEquals(sum, new S2Point(2, 2, 2)); + + // Check that each face has a right-handed coordinate system. + for (int face = 0; face < 6; ++face) { + assertEquals( + S2Point.crossProd(S2Projections.getUAxis(face), S2Projections.getVAxis(face)).dotProd( + S2Projections.faceUvToXyz(face, 0, 0)), 1.0); + } + + // Check that the Hilbert curves on each face combine to form a + // continuous curve over the entire cube. + for (int face = 0; face < 6; ++face) { + // The Hilbert curve on each face starts at (-1,-1) and terminates + // at either (1,-1) (if axes not swapped) or (-1,1) (if swapped). + int sign = ((face & S2.SWAP_MASK) != 0) ? -1 : 1; + assertEquals(S2Projections.faceUvToXyz(face, sign, -sign), + S2Projections.faceUvToXyz((face + 1) % 6, -1, -1)); + } + } + + public void testUVNorms() { + // Check that GetUNorm and GetVNorm compute right-handed normals for + // an edge in the increasing U or V direction. + for (int face = 0; face < 6; ++face) { + for (double x = -1; x <= 1; x += 1 / 1024.) { + assertDoubleNear( + S2Point.crossProd( + S2Projections.faceUvToXyz(face, x, -1), S2Projections.faceUvToXyz(face, x, 1)) + .angle(S2Projections.getUNorm(face, x)), 0); + assertDoubleNear( + S2Point.crossProd( + S2Projections.faceUvToXyz(face, -1, x), S2Projections.faceUvToXyz(face, 1, x)) + .angle(S2Projections.getVNorm(face, x)), 0); + } + } + } + + public void testUVAxes() { + // Check that axes are consistent with FaceUVtoXYZ. 
+ for (int face = 0; face < 6; ++face) { + assertEquals(S2Projections.getUAxis(face), S2Point.sub( + S2Projections.faceUvToXyz(face, 1, 0), S2Projections.faceUvToXyz(face, 0, 0))); + assertEquals(S2Projections.getVAxis(face), S2Point.sub( + S2Projections.faceUvToXyz(face, 0, 1), S2Projections.faceUvToXyz(face, 0, 0))); + } + } + + public void testAngleArea() { + S2Point pz = new S2Point(0, 0, 1); + S2Point p000 = new S2Point(1, 0, 0); + S2Point p045 = new S2Point(1, 1, 0); + S2Point p090 = new S2Point(0, 1, 0); + S2Point p180 = new S2Point(-1, 0, 0); + assertDoubleNear(S2.angle(p000, pz, p045), S2.M_PI_4); + assertDoubleNear(S2.angle(p045, pz, p180), 3 * S2.M_PI_4); + assertDoubleNear(S2.angle(p000, pz, p180), S2.M_PI); + assertDoubleNear(S2.angle(pz, p000, pz), 0); + assertDoubleNear(S2.angle(pz, p000, p045), S2.M_PI_2); + + assertDoubleNear(S2.area(p000, p090, pz), S2.M_PI_2); + assertDoubleNear(S2.area(p045, pz, p180), 3 * S2.M_PI_4); + + // Make sure that area() has good *relative* accuracy even for + // very small areas. + final double eps = 1e-10; + S2Point pepsx = new S2Point(eps, 0, 1); + S2Point pepsy = new S2Point(0, eps, 1); + double expected1 = 0.5 * eps * eps; + assertDoubleNear(S2.area(pepsx, pepsy, pz), expected1, 1e-14 * expected1); + + // Make sure that it can handle degenerate triangles. + S2Point pr = new S2Point(0.257, -0.5723, 0.112); + S2Point pq = new S2Point(-0.747, 0.401, 0.2235); + assertEquals(S2.area(pr, pr, pr), 0.0); + // TODO: The following test is not exact in optimized mode because the + // compiler chooses to mix 64-bit and 80-bit intermediate results. 
+ assertDoubleNear(S2.area(pr, pq, pr), 0); + assertEquals(S2.area(p000, p045, p090), 0.0); + + double maxGirard = 0; + for (int i = 0; i < 10000; ++i) { + S2Point p0 = randomPoint(); + S2Point d1 = randomPoint(); + S2Point d2 = randomPoint(); + S2Point p1 = S2Point.add(p0, S2Point.mul(d1, 1e-15)); + S2Point p2 = S2Point.add(p0, S2Point.mul(d2, 1e-15)); + // The actual displacement can be as much as 1.2e-15 due to roundoff. + // This yields a maximum triangle area of about 0.7e-30. + assertTrue(S2.area(p0, p1, p2) < 0.7e-30); + maxGirard = Math.max(maxGirard, S2.girardArea(p0, p1, p2)); + } + logger.info("Worst case Girard for triangle area 1e-30: " + maxGirard); + + // Try a very long and skinny triangle. + S2Point p045eps = new S2Point(1, 1, eps); + double expected2 = 5.8578643762690495119753e-11; // Mathematica. + assertDoubleNear(S2.area(p000, p045eps, p090), expected2, 1e-9 * expected2); + + // Triangles with near-180 degree edges that sum to a quarter-sphere. + final double eps2 = 1e-10; + S2Point p000eps2 = new S2Point(1, 0.1 * eps2, eps2); + double quarterArea1 = + S2.area(p000eps2, p000, p090) + S2.area(p000eps2, p090, p180) + S2.area(p000eps2, p180, pz) + + S2.area(p000eps2, pz, p000); + assertDoubleNear(quarterArea1, S2.M_PI); + + // Four other triangles that sum to a quarter-sphere. 
+ S2Point p045eps2 = new S2Point(1, 1, eps2); + double quarterArea2 = + S2.area(p045eps2, p000, p090) + S2.area(p045eps2, p090, p180) + S2.area(p045eps2, p180, pz) + + S2.area(p045eps2, pz, p000); + assertDoubleNear(quarterArea2, S2.M_PI); + } + + public void testCCW() { + S2Point a = new S2Point(0.72571927877036835, 0.46058825605889098, 0.51106749730504852); + S2Point b = new S2Point(0.7257192746638208, 0.46058826573818168, 0.51106749441312738); + S2Point c = new S2Point(0.72571927671709457, 0.46058826089853633, 0.51106749585908795); + assertTrue(S2.robustCCW(a, b, c) != 0); + } + + // Note: obviously, I could have defined a bundle of metrics like this in the + // S2 class itself rather than just for testing. However, it's not clear that + // this is useful other than for testing purposes, and I find + // S2.kMinWidth.GetMaxLevel(width) to be slightly more readable than + // than S2.kWidth.min().GetMaxLevel(width). Also, there is no fundamental + // reason that we need to analyze the minimum, maximum, and average values of + // every metric; it would be perfectly reasonable to just define one of these. 
+ + class MetricBundle { + public MetricBundle(S2.Metric min, S2.Metric max, S2.Metric avg) { + this.min_ = min; + this.max_ = max; + this.avg_ = avg; + } + + S2.Metric min_; + S2.Metric max_; + S2.Metric avg_; + } + + public void testMinMaxAvg(MetricBundle bundle) { + assertTrue(bundle.min_.deriv() < bundle.avg_.deriv()); + assertTrue(bundle.avg_.deriv() < bundle.max_.deriv()); + } + + public void testLessOrEqual(MetricBundle a, MetricBundle b) { + assertTrue(a.min_.deriv() <= b.min_.deriv()); + assertTrue(a.max_.deriv() <= b.max_.deriv()); + assertTrue(a.avg_.deriv() <= b.avg_.deriv()); + } + + public void testMetrics() { + + MetricBundle angleSpan = new MetricBundle( + S2Projections.MIN_ANGLE_SPAN, S2Projections.MAX_ANGLE_SPAN, S2Projections.AVG_ANGLE_SPAN); + MetricBundle width = + new MetricBundle(S2Projections.MIN_WIDTH, S2Projections.MAX_WIDTH, S2Projections.AVG_WIDTH); + MetricBundle edge = + new MetricBundle(S2Projections.MIN_EDGE, S2Projections.MAX_EDGE, S2Projections.AVG_EDGE); + MetricBundle diag = + new MetricBundle(S2Projections.MIN_DIAG, S2Projections.MAX_DIAG, S2Projections.AVG_DIAG); + MetricBundle area = + new MetricBundle(S2Projections.MIN_AREA, S2Projections.MAX_AREA, S2Projections.AVG_AREA); + + // First, check that min <= avg <= max for each metric. + testMinMaxAvg(angleSpan); + testMinMaxAvg(width); + testMinMaxAvg(edge); + testMinMaxAvg(diag); + testMinMaxAvg(area); + + // Check that the maximum aspect ratio of an individual cell is consistent + // with the global minimums and maximums. + assertTrue(S2Projections.MAX_EDGE_ASPECT >= 1.0); + assertTrue(S2Projections.MAX_EDGE_ASPECT + < S2Projections.MAX_EDGE.deriv() / S2Projections.MIN_EDGE.deriv()); + assertTrue(S2Projections.MAX_DIAG_ASPECT >= 1); + assertTrue(S2Projections.MAX_DIAG_ASPECT + < S2Projections.MAX_DIAG.deriv() / S2Projections.MIN_DIAG.deriv()); + + // Check various conditions that are provable mathematically. 
+ testLessOrEqual(width, angleSpan); + testLessOrEqual(width, edge); + testLessOrEqual(edge, diag); + + assertTrue(S2Projections.MIN_AREA.deriv() + >= S2Projections.MIN_WIDTH.deriv() * S2Projections.MIN_EDGE.deriv() - 1e-15); + assertTrue(S2Projections.MAX_AREA.deriv() + < S2Projections.MAX_WIDTH.deriv() * S2Projections.MAX_EDGE.deriv() + 1e-15); + + // GetMinLevelForLength() and friends have built-in assertions, we just need + // to call these functions to test them. + // + // We don't actually check that the metrics are correct here, e.g. that + // GetMinWidth(10) is a lower bound on the width of cells at level 10. + // It is easier to check these properties in s2cell_unittest, since + // S2Cell has methods to compute the cell vertices, etc. + + for (int level = -2; level <= S2CellId.MAX_LEVEL + 3; ++level) { + double dWidth = (2 * S2Projections.MIN_WIDTH.deriv()) * Math.pow(2, -level); + if (level >= S2CellId.MAX_LEVEL + 3) { + dWidth = 0; + } + + // Check boundary cases (exactly equal to a threshold value). + int expectedLevel = Math.max(0, Math.min(S2CellId.MAX_LEVEL, level)); + assertEquals(S2Projections.MIN_WIDTH.getMinLevel(dWidth), expectedLevel); + assertEquals(S2Projections.MIN_WIDTH.getMaxLevel(dWidth), expectedLevel); + assertEquals(S2Projections.MIN_WIDTH.getClosestLevel(dWidth), expectedLevel); + + // Also check non-boundary cases. + assertEquals(S2Projections.MIN_WIDTH.getMinLevel(1.2 * dWidth), expectedLevel); + assertEquals(S2Projections.MIN_WIDTH.getMaxLevel(0.8 * dWidth), expectedLevel); + assertEquals(S2Projections.MIN_WIDTH.getClosestLevel(1.2 * dWidth), expectedLevel); + assertEquals(S2Projections.MIN_WIDTH.getClosestLevel(0.8 * dWidth), expectedLevel); + + // Same thing for area1. 
+ double area1 = (4 * S2Projections.MIN_AREA.deriv()) * Math.pow(4, -level); + if (level <= -3) { + area1 = 0; + } + assertEquals(S2Projections.MIN_AREA.getMinLevel(area1), expectedLevel); + assertEquals(S2Projections.MIN_AREA.getMaxLevel(area1), expectedLevel); + assertEquals(S2Projections.MIN_AREA.getClosestLevel(area1), expectedLevel); + assertEquals(S2Projections.MIN_AREA.getMinLevel(1.2 * area1), expectedLevel); + assertEquals(S2Projections.MIN_AREA.getMaxLevel(0.8 * area1), expectedLevel); + assertEquals(S2Projections.MIN_AREA.getClosestLevel(1.2 * area1), expectedLevel); + assertEquals(S2Projections.MIN_AREA.getClosestLevel(0.8 * area1), expectedLevel); + } + } +} diff --git a/tests/com/google/testing/util/MoreAsserts.java b/tests/com/google/testing/util/MoreAsserts.java new file mode 100644 index 0000000..e4a8d37 --- /dev/null +++ b/tests/com/google/testing/util/MoreAsserts.java @@ -0,0 +1,207 @@ +/* + * Copyright 2011 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.testing.util; + +import static java.util.Arrays.asList; + +import com.google.common.base.Objects; +import com.google.common.collect.HashMultiset; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; +import java.util.Comparator; + +import junit.framework.Assert; + +import java.util.Iterator; +import java.util.List; + +public final class MoreAsserts { + + private MoreAsserts() { } + + /** + * Asserts that {@code actual} contains precisely the elements + * {@code expected}, in any order. Both collections may contain + * duplicates, and this method will only pass if the quantities are + * exactly the same. + */ + public static void assertContentsAnyOrder( + String message, Iterable<?> actual, Object... expected) { + assertEqualsImpl(message, + HashMultiset.create(asList(expected)), HashMultiset.create(actual)); + } + + /** + * Variant of {@link #assertContentsAnyOrder(String,Iterable,Object...)} + * using a generic message. + */ + public static void assertContentsAnyOrder( + Iterable<?> actual, Object... expected) { + assertContentsAnyOrder((String) null, actual, expected); + } + + /** + * Asserts that {@code actual} contains precisely the elements + * {@code expected}, in any order. Both collections may contain + * duplicates, and this method will only pass if the quantities are + * exactly the same. This method uses the user-provided Comparator + * object for doing the object comparison, instead of relying on the + * contents' implementation of {@link Object#equals(Object)}. It also takes + * in the expected set of objects as an Iterable. + * <p> + * Note the different order of expected and actual from the other + * {@link #assertContentsAnyOrder(String,Iterable,Object...)} + */ + public static <T> void assertContentsAnyOrder(String message, + Iterable<? extends T> expected, Iterable<? extends T> actual, + Comparator<? super T> comparator) { + // We should not iterate over an Iterable more than once. 
There's + // no guarentees that Iterable.iterator() returns an iterator over the + // entire collection every time. + // + // Why don't we use TreeMultiset? Unfortunately, TreeMultiset.toString() + // produces really odd output for duplicates. In addition, our contract + // states that we use the comparator to compare equality, not to order + // items. + ImmutableList<T> actualList = ImmutableList.copyOf(actual); + ImmutableList<T> expectedList = ImmutableList.copyOf(expected); + + // First compare sizes to save ourselves on N X M operation. + // This also handles the case where "expected" is a subset of "actual". + if (actualList.size() != expectedList.size()) { + failNotEqual(message, expectedList, actualList); + } + + // Now for each expected value, iterate through actuals and delete entry + // if found. We need to make another copy of the "actual" items because + // we will be removing items from this list, and we need to keep the original + // for the failure message. + List<T> unfoundItems = Lists.newLinkedList(actualList); + for (T ex : expectedList) { + boolean found = false; + Iterator<T> iter = unfoundItems.iterator(); + while (iter.hasNext()) { + T ac = iter.next(); + if (comparator.compare(ex, ac) == 0) { + iter.remove(); + found = true; + break; + } + } + if (!found) { + failNotEqual(message, expectedList, actualList); + } + } + } + + /** + * Variant of {@link #assertContentsAnyOrder(String,Iterable,Object...)} + * using a generic message. + */ + public static <T> void assertContentsAnyOrder( + Iterable<? extends T> expected, Iterable<? extends T> actual, + Comparator<? super T> comparator) { + assertContentsAnyOrder((String) null, expected, actual, comparator); + } + + + /** + * Utility for testing equals() and hashCode() results at once. + * Tests that lhs.equals(rhs) matches expectedResult, as well as + * rhs.equals(lhs). Also tests that hashCode() return values are + * equal if expectedResult is true. 
(hashCode() is not tested if + * expectedResult is false, as unequal objects can have equal hashCodes.) + * + * @param lhs An Object for which equals() and hashCode() are to be tested. + * @param rhs As lhs. + * @param expectedResult True if the objects should compare equal, + * false if not. + */ + public static void checkEqualsAndHashCodeMethods( + String message, Object lhs, Object rhs, boolean expectedResult) { + + if ((lhs == null) && (rhs == null)) { + Assert.assertTrue( + "Your check is dubious...why would you expect null != null?", + expectedResult); + return; + } + + if ((lhs == null) || (rhs == null)) { + Assert.assertFalse( + "Your check is dubious...why would you expect an object " + + "to be equal to null?", expectedResult); + } + + if (lhs != null) { + assertEqualsImpl(message, expectedResult, lhs.equals(rhs)); + } + if (rhs != null) { + assertEqualsImpl(message, expectedResult, rhs.equals(lhs)); + } + + if (expectedResult) { + String hashMessage = + "hashCode() values for equal objects should be the same"; + if (message != null) { + hashMessage += ": " + message; + } + Assert.assertTrue(hashMessage, lhs.hashCode() == rhs.hashCode()); + } + } + + /** + * Variant of + * {@link #checkEqualsAndHashCodeMethods(String, Object, Object, boolean)} + * using a generic message. 
+ */ + public static void checkEqualsAndHashCodeMethods(Object lhs, Object rhs, + boolean expectedResult) { + checkEqualsAndHashCodeMethods((String) null, lhs, rhs, expectedResult); + } + + private static void failNotEqual(String message, Object expected, + Object actual) { + if ((expected != null) && (actual != null) + && expected.toString().equals(actual.toString())) { + failWithMessage(message, "expected:<(" + + expected.getClass().getName() + ") " + expected + "> but was:<(" + + actual.getClass().getName() + ") " + actual + ">"); + } else { + failWithMessage(message, "expected:<" + expected + "> but was:<" + actual + + ">"); + } + } + + /** + * Replacement of {@link Assert#assertEquals} which provides the same error + * message in GWT and java. + */ + private static void assertEqualsImpl( + String message, Object expected, Object actual) { + if (!Objects.equal(expected, actual)) { + failWithMessage( + message, "expected:<" + expected + "> but was:<" + actual + ">"); + } + } + + private static void failWithMessage(String userMessage, String ourMessage) { + Assert.fail((userMessage == null) + ? ourMessage + : userMessage + ' ' + ourMessage); + } +} |