From 35c6fac75bc12be1bfb891a23f7b05a95ae906b2 Mon Sep 17 00:00:00 2001 From: Joe Schoonover Date: Tue, 20 May 2025 12:19:16 -0400 Subject: [PATCH 1/9] Add test for colinear lat/lon --- test/test_spatial_hashing.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/test/test_spatial_hashing.py b/test/test_spatial_hashing.py index d714b68ef..bbe1cc692 100644 --- a/test/test_spatial_hashing.py +++ b/test/test_spatial_hashing.py @@ -141,3 +141,12 @@ def test_list_of_coords_mpas_primal(): assert bcoords.shape[0] == num_particles assert bcoords.shape[1] == 6 # max sides of an element assert np.all(face_ids >= 0) # All particles should be inside an element + +def test_colinear_latlon(): + """Verifies valid spatial hashing for colinear points in latlon coordinates""" + + from uxarray.grid.neighbors import _barycentric_coordinates + + polygon = np.array([[-45, 87.87916205], [45, 87.87916205], [135, 87.87916205], [-135, 87.87916205]]) + point = np.array([-45, -87.87916205]) + weights = _barycentric_coordinates(polygon, point) From 55d1bd3fd70e43fa1a6f8ea58479894f86a29ea6 Mon Sep 17 00:00:00 2001 From: Joe Schoonover Date: Tue, 20 May 2025 13:29:49 -0400 Subject: [PATCH 2/9] Add function for computing barycentric coordinates using cartesian positions --- test/test_spatial_hashing.py | 17 ++++++++-- uxarray/grid/neighbors.py | 60 ++++++++++++++++++++++++++++++++++++ 2 files changed, 74 insertions(+), 3 deletions(-) diff --git a/test/test_spatial_hashing.py b/test/test_spatial_hashing.py index bbe1cc692..3fce348fe 100644 --- a/test/test_spatial_hashing.py +++ b/test/test_spatial_hashing.py @@ -142,11 +142,22 @@ def test_list_of_coords_mpas_primal(): assert bcoords.shape[1] == 6 # max sides of an element assert np.all(face_ids >= 0) # All particles should be inside an element -def test_colinear_latlon(): +def test_barycentric_coordinates_colinear_latlon(): """Verifies valid spatial hashing for colinear points in latlon coordinates""" - from uxarray.grid.neighbors import _barycentric_coordinates + from uxarray.grid.neighbors import _barycentric_coordinates, _barycentric_coordinates_cartesian + from uxarray.grid.coordinates import _lonlat_rad_to_xyz polygon = np.array([[-45, 87.87916205], [45, 87.87916205], [135, 87.87916205], [-135, 87.87916205]]) point = np.array([-45, -87.87916205]) - weights = _barycentric_coordinates(polygon, point) + #weights = _barycentric_coordinates(polygon, point) + + # Convert to Cartesian coordinates + polygon_rad = np.deg2rad(polygon) + point_rad = np.deg2rad(point) + x, y, z = _lonlat_rad_to_xyz(polygon_rad[:,0], polygon_rad[:,1]) + polygon_cartesian = np.array([x, y, z]).T + x, y, z = _lonlat_rad_to_xyz(point_rad[0], point_rad[1]) + point_cartesian = np.array([x, y, z]) + weights = _barycentric_coordinates_cartesian(polygon_cartesian, point_cartesian) + print(weights) diff --git a/uxarray/grid/neighbors.py b/uxarray/grid/neighbors.py index d836756a2..d602e3e38 100644 --- a/uxarray/grid/neighbors.py +++ b/uxarray/grid/neighbors.py @@ -793,6 +793,8 @@ class SpatialHash: ---------- grid : ux.Grid Source grid used to construct the hash grid and hash table + coordinate_system : str, default="spherical" + Sets the coordinate type used for barycentric coordinate. 
     reconstruct : bool, default=False
         If true, reconstructs the spatial hash
 
@@ -805,8 +807,17 @@ def __init__(
         self,
         grid,
         reconstruct: bool = False,
+        coordinate_system: Optional[str] = "spherical",
     ):
         self._source_grid = grid
+
+        if coordinate_system not in ["cartesian", "spherical"]:
+            raise TypeError(
+                f"Unknown coordinate_system, {coordinate_system}, use either 'cartesian' or "
+                f"'spherical'"
+            )
+
+        self.coordinate_system = coordinate_system
         self._nelements = self._source_grid.n_face
 
         self.reconstruct = reconstruct
@@ -963,6 +974,17 @@ def _triangle_area(A, B, C):
     return 0.5 * abs(A[0] * (B[1] - C[1]) + B[0] * (C[1] - A[1]) + C[0] * (A[1] - B[1]))
 
 
+@njit(cache=True)
+def _triangle_area_cartesian(A, B, C):
+    """
+    Compute the area of a triangle given by three points.
+    """
+    d1 = B - A
+    d2 = C - A
+    d3 = np.cross(d1, d2)
+    return 0.5 * np.linalg.norm(d3)
+
+
 @njit(cache=True)
 def _barycentric_coordinates(nodes, point):
     """
@@ -1001,6 +1023,44 @@ def _barycentric_coordinates(nodes, point):
     return barycentric_coords
 
 
+@njit(cache=True)
+def _barycentric_coordinates_cartesian(nodes, point):
+    """
+    Compute the barycentric coordinates of a point P inside a convex polygon using area-based weights.
+    So that this method generalizes to n-sided polygons, we use the Wachspress points as the generalized
+    barycentric coordinates, which are only valid for convex polygons.
+
+    Parameters
+    ----------
+    nodes : numpy.ndarray
+        Cartesian coordinates (x,y,z) of each corner node of a face
+    point : numpy.ndarray
+        Cartesian coordinates (x,y,z) of the point
+    Returns
+    -------
+    numpy.ndarray
+        Barycentric coordinates corresponding to each vertex.
+
+    """
+    n = len(nodes)
+    sum_wi = 0
+    w = []
+
+    for i in range(0, n):
+        vim1 = nodes[i - 1]
+        vi = nodes[i]
+        vi1 = nodes[(i + 1) % n]
+        a0 = _triangle_area_cartesian(vim1, vi, vi1)
+        a1 = max(_triangle_area_cartesian(point, vim1, vi), ERROR_TOLERANCE)
+        a2 = max(_triangle_area_cartesian(point, vi, vi1), ERROR_TOLERANCE)
+        sum_wi += a0 / (a1 * a2)
+        w.append(a0 / (a1 * a2))
+
+    barycentric_coords = [w_i / sum_wi for w_i in w]
+
+    return barycentric_coords
+
+
 def _prepare_xy_for_query(xy, use_radians, distance_metric):
     """Prepares xy coordinates for query with the sklearn BallTree or KDTree."""

From 5a94285ae4e07f0e7bae1db6ce913e85cf375dd6 Mon Sep 17 00:00:00 2001
From: Joe Schoonover
Date: Wed, 21 May 2025 13:11:59 -0400
Subject: [PATCH 3/9] Add support for computing barycentric coordinates from cartesian coords

---
 uxarray/grid/neighbors.py | 159 ++++++++++++++++++++++++++++++++------
 1 file changed, 136 insertions(+), 23 deletions(-)

diff --git a/uxarray/grid/neighbors.py b/uxarray/grid/neighbors.py
index d602e3e38..7ef52b8cc 100644
--- a/uxarray/grid/neighbors.py
+++ b/uxarray/grid/neighbors.py
@@ -817,6 +817,18 @@ def __init__(
             f"'spherical'"
         )
 
+        if coordinate_system == "cartesian":
+            if self._source_grid.n_max_face_nodes != np.mean(
+                self._source_grid.n_nodes_per_face
+            ):
+                raise ValueError(
+                    "SpatialHash with cartesian geometry only supports grids with a uniform face geometry type"
+                )
+            self._ll_tolerance = self._local_linearization_tolerance()
+
+        else:
+            self._ll_tolerance = 0.0
+
         self.coordinate_system = coordinate_system
         self._nelements = self._source_grid.n_face
 
@@ -903,6 +915,54 @@ def _initialize_face_hash_table(self):
 
         return index_to_face
 
+    def _local_linearization_tolerance(self):
+        """
+        Computes the upper bound of the local projection error that arises from
+        assuming planar faces.
+
+        Effectively, a planar face on a spherical manifold is a local linearization
+        of the spherical coordinate transformation. Since query points and nodes are converted to
+        cartesian coordinates using the full spherical coordinate transformation,
+        the local projection error will likely be non-zero but related to the discretization.
+
+        When computing the local linearization, we assume a planar face, whose normal is formed
+        by the cross product of two vectors extending from the first node in the face.
+        The upper bound on the local linearization error can be estimated using the projection
+        error of the face center onto this plane.
+        """
+        # Get grid variables
+        face_node_connectivity = (
+            self._source_grid.face_node_connectivity.to_numpy()
+        )  # shape (n_face, max_nodes_per_face)
+
+        # Assumes all elements are the same geometric type
+        nodes = np.stack(
+            (
+                self._source_grid.node_x.to_numpy()[face_node_connectivity],
+                self._source_grid.node_y.to_numpy()[face_node_connectivity],
+                self._source_grid.node_z.to_numpy()[face_node_connectivity],
+            ),
+            axis=-1,
+        )  # shape (n_face, N, 3)
+
+        face_centers = np.stack(
+            (
+                self._source_grid.face_x.to_numpy(),
+                self._source_grid.face_y.to_numpy(),
+                self._source_grid.face_z.to_numpy(),
+            ),
+            axis=-1,
+        )  # shape (n_face, 3)
+
+        ll_tolerance = np.array(
+            [
+                _local_projection_error(nodes[i], face_centers[i])
+                for i in range(self._source_grid.n_face)
+            ]
+        )
+
+        return ll_tolerance
+
     def query(
         self,
         coords: Union[np.ndarray, list, tuple],
@@ -928,7 +988,13 @@ def query(
             Barycentric coordinates of each coords element
         """
 
-        coords = _prepare_xy_for_query(coords, in_radians, distance_metric=None)
+        if self.coordinate_system == "spherical":
+            coords = _prepare_xy_for_query(coords, in_radians, distance_metric=None)
+        else:
+            # TODO: allow user to specify either spherical or cartesian query coordinates.
+ # If the self.coordinate_system is cartesian, then the coords should be cartesian + coords = _prepare_xyz_for_query(coords) + num_coords = coords.shape[0] max_nodes = self._source_grid.n_max_face_nodes @@ -940,28 +1006,57 @@ def query( n_nodes_per_face = self._source_grid.n_nodes_per_face.to_numpy() face_node_connectivity = self._source_grid.face_node_connectivity.to_numpy() - # Precompute radian values for node coordinates: - node_lon = np.deg2rad(self._source_grid.node_lon.to_numpy()) - node_lat = np.deg2rad(self._source_grid.node_lat.to_numpy()) - - # Get the list of candidate faces for each coordinate - candidate_faces = [ - self._face_hash_table[pid] for pid in self._hash_index(coords) - ] - - for i, (coord, candidates) in enumerate(zip(coords, candidate_faces)): - for face_id in candidates: - n_nodes = n_nodes_per_face[face_id] - node_ids = face_node_connectivity[face_id, :n_nodes] - nodes = np.column_stack((node_lon[node_ids], node_lat[node_ids])) - bcoord = np.asarray(_barycentric_coordinates(nodes, coord)) - err = abs(np.dot(bcoord, nodes[:, 0]) - coord[0]) + abs( - np.dot(bcoord, nodes[:, 1]) - coord[1] - ) - if (bcoord >= 0).all() and err < tol: - faces[i] = face_id - bcoords[i, :n_nodes] = bcoord[:n_nodes] - break + if self.coordinate_system == "spherical": + # Precompute radian values for node coordinates: + node_lon = np.deg2rad(self._source_grid.node_lon.to_numpy()) + node_lat = np.deg2rad(self._source_grid.node_lat.to_numpy()) + + # Get the list of candidate faces for each coordinate + candidate_faces = [ + self._face_hash_table[pid] for pid in self._hash_index(coords) + ] + + if self.coordinate_system == "spherical": + for i, (coord, candidates) in enumerate(zip(coords, candidate_faces)): + for face_id in candidates: + n_nodes = n_nodes_per_face[face_id] + node_ids = face_node_connectivity[face_id, :n_nodes] + nodes = np.column_stack( + (node_lon[node_ids], node_lat[node_ids]) + ) + bcoord = np.asarray(_barycentric_coordinates(nodes, coord)) + err = abs(np.dot(bcoord, nodes[:, 0]) - coord[0]) + abs( + np.dot(bcoord, nodes[:, 1]) - coord[1] + ) + if (bcoord >= 0).all() and err < tol: + faces[i] = face_id + bcoords[i, :n_nodes] = bcoord[:n_nodes] + break + + else: + # Precompute cartesian values for node coordinates: + nodes = np.stack( + ( + self._source_grid.node_x.to_numpy()[face_node_connectivity], + self._source_grid.node_y.to_numpy()[face_node_connectivity], + self._source_grid.node_z.to_numpy()[face_node_connectivity], + ), + axis=-1, + ) # shape (n_face, N, 3) + + for i, (coord, candidates) in enumerate(zip(coords, candidate_faces)): + for face_id in candidates: + n_nodes = n_nodes_per_face[face_id] + bcoord = np.asarray( + _barycentric_coordinates_cartesian(nodes[face_id], coord) + ) + proj_uv = np.dot(bcoord, nodes[face_id, :, :]) + err = np.linalg.norm(proj_uv - coord) + + if (bcoord >= 0).all() and err < self._ll_tolerance[face_id] + tol: + faces[i] = face_id + bcoords[i] = bcoord + break return faces, bcoords @@ -974,6 +1069,24 @@ def _triangle_area(A, B, C): return 0.5 * abs(A[0] * (B[1] - C[1]) + B[0] * (C[1] - A[1]) + C[0] * (A[1] - B[1])) +@njit(cache=True) +def _local_projection_error(nodes, point): + """ + Computes the size of the local projection error that arises from + assuming planar faces. Effectively, a planar face on a spherical + manifold is local linearization of the spherical coordinate + transformation. 
Since query points and nodes are converted to
+    cartesian coordinates using the full spherical coordinate transformation,
+    the local projection error will likely be non-zero but related to the discretization.
+    """
+    a = nodes[1] - nodes[0]
+    b = nodes[2] - nodes[0]
+    normal = np.cross(a, b)
+    normal /= np.linalg.norm(normal)
+    d = point - nodes[0]
+    return abs(np.dot(d, normal))
+
+
 @njit(cache=True)
 def _triangle_area_cartesian(A, B, C):
     """

From 761573ead6ac90bff9ad2349ac794d39d5486b6b Mon Sep 17 00:00:00 2001
From: Joe Schoonover
Date: Wed, 21 May 2025 13:12:33 -0400
Subject: [PATCH 4/9] Add test for checking if a point is in a colinear element at the north pole

---
 test/test_spatial_hashing.py | 32 +++++++++++++++++++++-----------
 1 file changed, 21 insertions(+), 11 deletions(-)

diff --git a/test/test_spatial_hashing.py b/test/test_spatial_hashing.py
index 3fce348fe..2a1a4a1f0 100644
--- a/test/test_spatial_hashing.py
+++ b/test/test_spatial_hashing.py
@@ -25,12 +25,13 @@
 gridfile_geoflow, gridfile_mpas]
 
-def test_construction():
+# Parameterize test over grid files
+@pytest.mark.parametrize("grid_file", grid_files)
+def test_construction(grid_file):
     """Tests the construction of the SpatialHash object"""
-    for grid_file in grid_files:
-        uxgrid = ux.open_grid(grid_file)
-        face_ids, bcoords = uxgrid.get_spatial_hash().query([0.9, 1.8])
-        assert face_ids.shape[0] == bcoords.shape[0]
+    uxgrid = ux.open_grid(grid_file)
+    face_ids, bcoords = uxgrid.get_spatial_hash().query([0.9, 1.8])
+    assert face_ids.shape[0] == bcoords.shape[0]
 
 
 def test_is_inside():
@@ -143,21 +144,30 @@ def test_list_of_coords_mpas_primal():
     assert np.all(face_ids >= 0) # All particles should be inside an element
 
 def test_barycentric_coordinates_colinear_latlon():
-    """Verifies valid spatial hashing for colinear points in latlon coordinates"""
-
-    from uxarray.grid.neighbors import _barycentric_coordinates
+    """Verifies valid barycentric coordinates for colinear points in latlon coordinates with a query point inside the face"""
+    from uxarray.grid.neighbors import _barycentric_coordinates, _barycentric_coordinates_cartesian, _local_projection_error
     from uxarray.grid.coordinates import _lonlat_rad_to_xyz
 
     polygon = np.array([[-45, 87.87916205], [45, 87.87916205], [135, 87.87916205], [-135, 87.87916205]])
-    point = np.array([-45, -87.87916205])
-    #weights = _barycentric_coordinates(polygon, point)
+    # North pole
+    pole = np.array([0, 90]) # Point where the local linear projection error is maximal
+    point = np.array([-45, 89.87916205]) # A point somewhere in the polar cap
 
     # Convert to Cartesian coordinates
     polygon_rad = np.deg2rad(polygon)
     point_rad = np.deg2rad(point)
+    pole_rad = np.deg2rad(pole)
     x, y, z = _lonlat_rad_to_xyz(polygon_rad[:,0], polygon_rad[:,1])
     polygon_cartesian = np.array([x, y, z]).T
     x, y, z = _lonlat_rad_to_xyz(point_rad[0], point_rad[1])
     point_cartesian = np.array([x, y, z])
+    x, y, z = _lonlat_rad_to_xyz(pole_rad[0], pole_rad[1])
+    pole_cartesian = np.array([x, y, z])
+
+    max_projection_error = _local_projection_error(polygon_cartesian, pole_cartesian)
+
     weights = _barycentric_coordinates_cartesian(polygon_cartesian, point_cartesian)
-    print(weights)
+    proj_uv = np.dot(weights, polygon_cartesian[:, :])
+    err = np.linalg.norm(proj_uv - point_cartesian)
+
+    assert err < max_projection_error + ERROR_TOLERANCE, f"Projection error {err} exceeds tolerance {max_projection_error + ERROR_TOLERANCE}"

From c1411c13c15d314175d093ce40be7872c99b654e Mon Sep 17 00:00:00 2001
From: Joe Schoonover
Date: Thu, 22 May 2025 20:45:45 -0400
Subject: [PATCH 5/9] Add test on global grid for querying along antimeridian

---
 test/test_spatial_hashing.py | 18 +++++++++++++++++-
 1 file changed, 17 insertions(+), 1 deletion(-)

diff --git a/test/test_spatial_hashing.py b/test/test_spatial_hashing.py
index 2a1a4a1f0..9d3a76318 100644
--- a/test/test_spatial_hashing.py
+++ b/test/test_spatial_hashing.py
@@ -145,7 +145,7 @@ def test_list_of_coords_mpas_primal():
 
 def test_barycentric_coordinates_colinear_latlon():
     """Verifies valid barycentric coordinates for colinear points in latlon coordinates with a query point inside the face"""
-    from uxarray.grid.neighbors import _barycentric_coordinates, _barycentric_coordinates_cartesian, _local_projection_error
+    from uxarray.grid.neighbors import _barycentric_coordinates_cartesian, _local_projection_error
     from uxarray.grid.coordinates import _lonlat_rad_to_xyz
 
     polygon = np.array([[-45, 87.87916205], [45, 87.87916205], [135, 87.87916205], [-135, 87.87916205]])
@@ -171,3 +171,19 @@ def test_barycentric_coordinates_colinear_latlon():
     err = np.linalg.norm(proj_uv - point_cartesian)
 
     assert err < max_projection_error + ERROR_TOLERANCE, f"Projection error {err} exceeds tolerance {max_projection_error + ERROR_TOLERANCE}"
+
+
+def test_global_antimeridian_query():
+    """Verifies correct values when a query is made across the global antimeridian"""
+    uxgrid = ux.open_grid(gridfile_CSne30)
+    points = np.array([[-179.0, 0.0], [-180.0, 0.0], [180.0, 0.0], [179.0, 0.0]])
+    face_ids, bcoords = uxgrid.get_spatial_hash(coordinate_system='cartesian', global_grid=True).query(points)
+
+    # since (-180,0) and (180,0) are the same point, they should return the same face id
+    # and the same barycentric coordinates
+    assert face_ids[2] == face_ids[1]
+    assert np.allclose(bcoords[2], bcoords[1], atol=ERROR_TOLERANCE)
+
+    # The other points should be found, independent of which side of the antimeridian they are on
+    assert face_ids[0] != -1
+    assert face_ids[3] != -1

From e4ce94cebdbf49f9e304c4d7f43d5c0566dbb0de Mon Sep 17 00:00:00 2001
From: Joe Schoonover
Date: Thu, 22 May 2025 20:46:56 -0400
Subject: [PATCH 6/9] Add support for indexing hash table across antimeridian

This commit adds an option for indicating if a global grid is used. When
enabled, this sets the spatial hash grid domain to [-pi,pi]x[-pi,pi]
explicitly.

Support for cartesian barycentric coordinate calculation in the spatial
query is now included.
---
 uxarray/grid/grid.py      | 10 ++++-
 uxarray/grid/neighbors.py | 93 +++++++++++++++++++++++++++------------
 2 files changed, 73 insertions(+), 30 deletions(-)

diff --git a/uxarray/grid/grid.py b/uxarray/grid/grid.py
index 9c56a439d..a4b89f091 100644
--- a/uxarray/grid/grid.py
+++ b/uxarray/grid/grid.py
@@ -1789,6 +1789,8 @@ def get_kd_tree(
 
     def get_spatial_hash(
         self,
+        coordinate_system: Optional[str] = "spherical",
+        global_grid: bool = False,
         reconstruct: bool = False,
     ):
         """Get the SpatialHash data structure of this Grid that allows for
@@ -1797,6 +1799,10 @@ def get_spatial_hash(
 
         Parameters
         ----------
+        coordinate_system : str, default="spherical"
+            Sets the coordinate type used for barycentric coordinate.
+ global_grid : bool, default=False + If true, the hash grid is constructed using the domain [-pi,pi] x [-pi,pi] reconstruct : bool, default=False If true, reconstructs the spatial hash @@ -1825,7 +1831,9 @@ def get_spatial_hash( >>> face_ids, bcoords = spatial_hash.query([0.0, 0.0]) """ if self._spatialhash is None or reconstruct: - self._spatialhash = SpatialHash(self, reconstruct) + self._spatialhash = SpatialHash( + self, coordinate_system, global_grid, reconstruct + ) return self._spatialhash diff --git a/uxarray/grid/neighbors.py b/uxarray/grid/neighbors.py index 7ef52b8cc..2528e438e 100644 --- a/uxarray/grid/neighbors.py +++ b/uxarray/grid/neighbors.py @@ -6,6 +6,7 @@ from numpy import deg2rad from uxarray.constants import ERROR_TOLERANCE, INT_DTYPE, INT_FILL_VALUE +from uxarray.grid.coordinates import _lonlat_rad_to_xyz class KDTree: @@ -795,6 +796,8 @@ class SpatialHash: Source grid used to construct the hash grid and hash table coordinate_system : str, default="spherical" Sets the coordinate type used for barycentric coordinate. + global_grid : bool, default=False + If true, the hash grid is constructed using the domain [-pi,pi] x [-pi,pi] reconstruct : bool, default=False If true, reconstructs the spatial hash @@ -806,8 +809,9 @@ class SpatialHash: def __init__( self, grid, - reconstruct: bool = False, coordinate_system: Optional[str] = "spherical", + global_grid: Optional[bool] = False, + reconstruct: bool = False, ): self._source_grid = grid @@ -837,16 +841,23 @@ def __init__( # Hash grid size self._dh = self._hash_cell_size() - # Lower left corner of the hash grid - lon_min = np.deg2rad(self._source_grid.node_lon.min().to_numpy()) - lat_min = np.deg2rad(self._source_grid.node_lat.min().to_numpy()) - lon_max = np.deg2rad(self._source_grid.node_lon.max().to_numpy()) - lat_max = np.deg2rad(self._source_grid.node_lat.max().to_numpy()) + if global_grid: + # Set the hash grid to be a global grid + self._xmin = -np.pi + self._ymin = -np.pi + self._xmax = np.pi + self._ymax = np.pi + else: + # Lower left corner of the hash grid + lon_min = np.deg2rad(self._source_grid.node_lon.min().to_numpy()) + lat_min = np.deg2rad(self._source_grid.node_lat.min().to_numpy()) + lon_max = np.deg2rad(self._source_grid.node_lon.max().to_numpy()) + lat_max = np.deg2rad(self._source_grid.node_lat.max().to_numpy()) - self._xmin = lon_min - self._dh - self._ymin = lat_min - self._dh - self._xmax = lon_max + self._dh - self._ymax = lat_max + self._dh + self._xmin = lon_min - self._dh + self._ymin = lat_min - self._dh + self._xmax = lon_max + self._dh + self._ymax = lat_max + self._dh # Number of x points in the hash grid; used for # array flattening @@ -868,7 +879,9 @@ def _hash_index2d(self, coords): """Computes the 2-d hash index (i,j) for the location (x,y), where x and y are given in spherical coordinates (in degrees)""" - i = ((coords[:, 0] - self._xmin) / self._dh).astype(INT_DTYPE) + # Wrap longitude to [-pi, pi] + lon = (coords[:, 0] + np.pi) % (2 * np.pi) - np.pi + i = ((lon - self._xmin) / self._dh).astype(INT_DTYPE) j = ((coords[:, 1] - self._ymin) / self._dh).astype(INT_DTYPE) return i, j @@ -885,29 +898,37 @@ def _initialize_face_hash_table(self): if self._face_hash_table is None or self.reconstruct: index_to_face = [[] for i in range(self._nx * self._ny)] - lon_bounds = np.sort(self._source_grid.face_bounds_lon.to_numpy(), 1) - lat_bounds = self._source_grid.face_bounds_lat.to_numpy() + lon_bounds = np.deg2rad(self._source_grid.face_bounds_lon.to_numpy()) + lat_bounds = 
np.deg2rad(self._source_grid.face_bounds_lat.to_numpy()) coords = np.column_stack( ( - np.deg2rad(lon_bounds[:, 0].flatten()), - np.deg2rad(lat_bounds[:, 0].flatten()), + lon_bounds[:, 0].flatten(), + lat_bounds[:, 0].flatten(), ) ) i1, j1 = self._hash_index2d(coords) coords = np.column_stack( ( - np.deg2rad(lon_bounds[:, 1].flatten()), - np.deg2rad(lat_bounds[:, 1].flatten()), + lon_bounds[:, 1].flatten(), + lat_bounds[:, 1].flatten(), ) ) i2, j2 = self._hash_index2d(coords) - try: for eid in range(self._source_grid.n_face): for j in range(j1[eid], j2[eid] + 1): - for i in range(i1[eid], i2[eid] + 1): - index_to_face[i + self._nx * j].append(eid) + if i1[eid] <= i2[eid]: + # Normal case, no wrap + for i in range(i1[eid], i2[eid] + 1): + index_to_face[(i % self._nx) + self._nx * j].append(eid) + else: + # Wrap-around case + for i in range(i1[eid], self._nx): + index_to_face[(i % self._nx) + self._nx * j].append(eid) + for i in range(0, i2[eid] + 1): + index_to_face[(i % self._nx) + self._nx * j].append(eid) + except IndexError: raise IndexError( "list index out of range. This may indicate incorrect `edge_node_distances` values." @@ -974,7 +995,10 @@ def query( Parameters ---------- coords : array_like - coordinate pairs in degrees (lon, lat) to query + coordinate pairs in degrees (lon, lat) or cartesian (x,y,z) to query. If the SpatialHash.coordinate_system is + "spherical", then coords should be in degrees (lon, lat). If the SpatialHash.coordinate_system is "cartesian", + then coords can either be in degrees (lon, lat) or cartesian (x,y,z). + in_radians : bool, optional if True, queries assuming coords are inputted in radians, not degrees. Only applies to spherical coords @@ -991,9 +1015,20 @@ def query( if self.coordinate_system == "spherical": coords = _prepare_xy_for_query(coords, in_radians, distance_metric=None) else: - # TODO: allow user to specify either spherical or cartesian query coordinates. + # allow user to specify either spherical or cartesian query coordinates. 
# If the self.coordinate_system is cartesian, then the coords should be cartesian - coords = _prepare_xyz_for_query(coords) + # expand if only a single node pair is provided + if coords.ndim == 1: + coords = np.expand_dims(coords, axis=0) + + if coords.shape[1] == 2: + coords = _prepare_xy_for_query(coords, in_radians, distance_metric=None) + + # Calculate corresponding cartesian coordinates + x, y, z = _lonlat_rad_to_xyz(coords[:, 0], coords[:, 1]) + cart_coords = np.array([x, y, z]).T + + cart_coords = _prepare_xyz_for_query(cart_coords) num_coords = coords.shape[0] max_nodes = self._source_grid.n_max_face_nodes @@ -1006,16 +1041,16 @@ def query( n_nodes_per_face = self._source_grid.n_nodes_per_face.to_numpy() face_node_connectivity = self._source_grid.face_node_connectivity.to_numpy() + # Get the list of candidate faces for each coordinate + candidate_faces = [ + self._face_hash_table[pid] for pid in self._hash_index(coords) + ] + if self.coordinate_system == "spherical": # Precompute radian values for node coordinates: node_lon = np.deg2rad(self._source_grid.node_lon.to_numpy()) node_lat = np.deg2rad(self._source_grid.node_lat.to_numpy()) - # Get the list of candidate faces for each coordinate - candidate_faces = [ - self._face_hash_table[pid] for pid in self._hash_index(coords) - ] - if self.coordinate_system == "spherical": for i, (coord, candidates) in enumerate(zip(coords, candidate_faces)): for face_id in candidates: @@ -1044,7 +1079,7 @@ def query( axis=-1, ) # shape (n_face, N, 3) - for i, (coord, candidates) in enumerate(zip(coords, candidate_faces)): + for i, (coord, candidates) in enumerate(zip(cart_coords, candidate_faces)): for face_id in candidates: n_nodes = n_nodes_per_face[face_id] bcoord = np.asarray( From 200e1b00a13028ee74d9b74ad752979970eeee64 Mon Sep 17 00:00:00 2001 From: Joe Schoonover Date: Fri, 23 May 2025 11:23:19 -0400 Subject: [PATCH 7/9] Change vertex ordering to counter-clockwise This is necessary to obtain the correct orientation for detecting antimeridian faces --- test/test_spatial_hashing.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/test/test_spatial_hashing.py b/test/test_spatial_hashing.py index 9d3a76318..d2d5471d8 100644 --- a/test/test_spatial_hashing.py +++ b/test/test_spatial_hashing.py @@ -36,7 +36,7 @@ def test_construction(grid_file): def test_is_inside(): """Verifies simple test for points inside and outside an element.""" - verts = [(0.0, 90.0), (-180, 0.0), (0.0, -90)] + verts = [(-180, 0.0), (0.0, -90.0), (0.0, 90.0)] uxgrid = ux.open_grid(verts, latlon=True) # Verify that a point outside the element returns a face id of -1 face_ids, bcoords = uxgrid.get_spatial_hash().query([90.0, 0.0]) @@ -45,43 +45,43 @@ def test_is_inside(): face_ids, bcoords = uxgrid.get_spatial_hash().query([-90.0, 0.0]) assert face_ids[0] == 0 - assert np.allclose(bcoords[0], [0.25, 0.5, 0.25], atol=1e-06) + assert np.allclose(bcoords[0], [0.5, 0.25, 0.25], atol=1e-06) def test_query_on_vertex(): """Verifies correct values when a query is made exactly on a vertex""" - verts = [(0.0, 90.0), (-180, 0.0), (0.0, -90)] + verts = [(-180, 0.0), (0.0, -90.0), (0.0, 90.0)] uxgrid = ux.open_grid(verts, latlon=True) # Verify that a point outside the element returns a face id of -1 face_ids, bcoords = uxgrid.get_spatial_hash().query([0.0, 90.0]) assert face_ids[0] == 0 - assert np.isclose(bcoords[0,0],1.0,atol=ERROR_TOLERANCE) + assert np.isclose(bcoords[0,2],1.0,atol=ERROR_TOLERANCE) assert 
np.isclose(bcoords[0,1],0.0,atol=ERROR_TOLERANCE) - assert np.isclose(bcoords[0,2],0.0,atol=ERROR_TOLERANCE) + assert np.isclose(bcoords[0,0],0.0,atol=ERROR_TOLERANCE) def test_query_on_edge(): """Verifies correct values when a query is made exactly on an edge of a face""" - verts = [(0.0, 90.0), (-180, 0.0), (0.0, -90)] + verts = [(-180, 0.0), (0.0, -90.0), (0.0, 90.0)] uxgrid = ux.open_grid(verts, latlon=True) # Verify that a point outside the element returns a face id of -1 face_ids, bcoords = uxgrid.get_spatial_hash().query([0.0, 0.0]) assert face_ids[0] == 0 - assert np.isclose(bcoords[0,0],0.5,atol=ERROR_TOLERANCE) - assert np.isclose(bcoords[0,1],0.0,atol=ERROR_TOLERANCE) + assert np.isclose(bcoords[0,0],0.0,atol=ERROR_TOLERANCE) + assert np.isclose(bcoords[0,1],0.5,atol=ERROR_TOLERANCE) assert np.isclose(bcoords[0,2],0.5,atol=ERROR_TOLERANCE) def test_list_of_coords_simple(): """Verifies test using list of points inside and outside an element""" - verts = [(0.0, 90.0), (-180, 0.0), (0.0, -90)] + verts = [(-180, 0.0), (0.0, -90.0), (0.0, 90.0)] uxgrid = ux.open_grid(verts, latlon=True) coords = [[90.0, 0.0], [-90.0, 0.0]] face_ids, bcoords = uxgrid.get_spatial_hash().query(coords) assert face_ids[0] == -1 assert face_ids[1] == 0 - assert np.allclose(bcoords[1], [0.25, 0.5, 0.25], atol=1e-06) + assert np.allclose(bcoords[1], [0.5, 0.25, 0.25], atol=1e-06) def test_list_of_coords_fesom(): From c7a169c72d01a44a05b3dbe4546fc7a9e99445a9 Mon Sep 17 00:00:00 2001 From: Joe Schoonover Date: Thu, 17 Jul 2025 16:18:10 -0400 Subject: [PATCH 8/9] Set in_radians to true to be consistent with test input --- test/test_spatial_hashing.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/test_spatial_hashing.py b/test/test_spatial_hashing.py index d2d5471d8..f3285faaa 100644 --- a/test/test_spatial_hashing.py +++ b/test/test_spatial_hashing.py @@ -117,7 +117,7 @@ def test_list_of_coords_mpas_dual(): for k in range(num_particles): coords[k,0] = np.deg2rad(np.random.uniform(x_min, x_max)) coords[k,1] = np.deg2rad(np.random.uniform(y_min, y_max)) - face_ids, bcoords = uxgrid.get_spatial_hash().query(coords) + face_ids, bcoords = uxgrid.get_spatial_hash().query(coords,in_radians=True) assert len(face_ids) == num_particles assert bcoords.shape[0] == num_particles assert bcoords.shape[1] == 3 # max sides of an element @@ -137,7 +137,7 @@ def test_list_of_coords_mpas_primal(): for k in range(num_particles): coords[k,0] = np.deg2rad(np.random.uniform(x_min, x_max)) coords[k,1] = np.deg2rad(np.random.uniform(y_min, y_max)) - face_ids, bcoords = uxgrid.get_spatial_hash().query(coords) + face_ids, bcoords = uxgrid.get_spatial_hash().query(coords, in_radians=True) assert len(face_ids) == num_particles assert bcoords.shape[0] == num_particles assert bcoords.shape[1] == 6 # max sides of an element From 5fa0d3d2e1b86e7c638a6958eedd27d585bf04f5 Mon Sep 17 00:00:00 2001 From: Joe Schoonover Date: Mon, 21 Jul 2025 15:38:19 -0400 Subject: [PATCH 9/9] Allow users to send in cartesian coordinates as well --- uxarray/grid/neighbors.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/uxarray/grid/neighbors.py b/uxarray/grid/neighbors.py index 8bcbd68f5..ea8224e24 100644 --- a/uxarray/grid/neighbors.py +++ b/uxarray/grid/neighbors.py @@ -1024,9 +1024,12 @@ def query( if coords.shape[1] == 2: coords = _prepare_xy_for_query(coords, in_radians, distance_metric=None) - # Calculate corresponding cartesian coordinates - x, y, z = _lonlat_rad_to_xyz(coords[:, 0], coords[:, 
1]) - cart_coords = np.array([x, y, z]).T + # Calculate corresponding cartesian coordinates + x, y, z = _lonlat_rad_to_xyz(coords[:, 0], coords[:, 1]) + cart_coords = np.array([x, y, z]).T + + else: + cart_coords = coords cart_coords = _prepare_xyz_for_query(cart_coords) @@ -1087,6 +1090,9 @@ def query( ) proj_uv = np.dot(bcoord, nodes[face_id, :, :]) err = np.linalg.norm(proj_uv - coord) + print( + f"bcoord: {bcoord}, err: {err}, face_id: {face_id}, coord: {coord}" + ) if (bcoord >= 0).all() and err < self._ll_tolerance[face_id] + tol: faces[i] = face_id
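
Usage note (not part of the diffs above): the snippet below is a minimal sketch of the query workflow these patches enable. The grid file path "grid.nc" is a placeholder; the `coordinate_system` and `global_grid` keywords and the `(face_ids, bcoords)` return pair follow the signatures added in the patches.

# Minimal usage sketch of the API added in this patch series (illustrative only).
# Assumes an unstructured-grid file is available; "grid.nc" is a placeholder path.
import numpy as np
import uxarray as ux

uxgrid = ux.open_grid("grid.nc")

# Cartesian barycentric coordinates with a global hash grid, so queries on
# either side of the antimeridian resolve to the same face.
spatial_hash = uxgrid.get_spatial_hash(coordinate_system="cartesian", global_grid=True)

points = np.array([[-180.0, 0.0], [180.0, 0.0], [179.0, 0.0]])  # (lon, lat) in degrees
face_ids, bcoords = spatial_hash.query(points)

# face_ids[i] == -1 means no containing face was found; otherwise bcoords[i]
# holds the generalized barycentric weights of points[i] in that face.
print(face_ids)
print(bcoords)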
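
For reference, the cartesian Wachspress weights and the planar projection-error bound introduced in patches 2-3 can be sketched in plain NumPy as follows. This is a simplified re-implementation for illustration only, not the library code: there is no numba, the function names are illustrative, and the lon/lat-to-unit-sphere conversion is the standard one assumed to match `_lonlat_rad_to_xyz`.

# Simplified sketch (illustrative names, standard lon/lat -> xyz assumption).
import numpy as np

def triangle_area(a, b, c):
    # Area of the 3D triangle (a, b, c).
    return 0.5 * np.linalg.norm(np.cross(b - a, c - a))

def wachspress_weights(nodes, point, eps=1e-12):
    # Generalized (Wachspress) barycentric weights of `point` for a convex,
    # roughly planar polygon with vertices `nodes` (shape (n, 3)).
    n = len(nodes)
    w = []
    for i in range(n):
        vim1, vi, vi1 = nodes[i - 1], nodes[i], nodes[(i + 1) % n]
        a0 = triangle_area(vim1, vi, vi1)
        a1 = max(triangle_area(point, vim1, vi), eps)
        a2 = max(triangle_area(point, vi, vi1), eps)
        w.append(a0 / (a1 * a2))
    w = np.asarray(w)
    return w / w.sum()

def planar_projection_error(nodes, point):
    # Distance from `point` to the plane spanned by the first three polygon nodes;
    # used as an upper bound on the error of treating a spherical face as planar.
    normal = np.cross(nodes[1] - nodes[0], nodes[2] - nodes[0])
    normal /= np.linalg.norm(normal)
    return abs(np.dot(point - nodes[0], normal))

# Example: a square polar cap on the unit sphere and a query point inside it.
lon = np.deg2rad([-45.0, 45.0, 135.0, -135.0])
lat = np.deg2rad([87.87916205] * 4)
nodes = np.column_stack((np.cos(lat) * np.cos(lon), np.cos(lat) * np.sin(lon), np.sin(lat)))

p_lon, p_lat = np.deg2rad(-45.0), np.deg2rad(89.87916205)
point = np.array([np.cos(p_lat) * np.cos(p_lon), np.cos(p_lat) * np.sin(p_lon), np.sin(p_lat)])

weights = wachspress_weights(nodes, point)
err = np.linalg.norm(weights @ nodes - point)      # reconstruction error of the planar model
tol = planar_projection_error(nodes, np.array([0.0, 0.0, 1.0]))  # bound evaluated at the pole
print(weights, err, tol)  # weights sum to 1; err stays within a small tolerance of tol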