from __future__ import annotations
import functools
from collections.abc import Iterable
from typing import TYPE_CHECKING
import cf_xarray # noqa: F401
import numpy as np
import uxarray as ux
import xarray as xr
import xgcm
from parcels._core.field import Field, VectorField
from parcels._core.utils import sgrid
from parcels._core.utils.string import _assert_str_and_python_varname
from parcels._core.utils.time import get_datetime_type_calendar
from parcels._core.utils.time import is_compatible as datetime_is_compatible
from parcels._core.uxgrid import UxGrid
from parcels._core.xgrid import _DEFAULT_XGCM_KWARGS, XGrid
from parcels._logger import logger
from parcels._reprs import fieldset_repr
from parcels._typing import Mesh
from parcels.interpolators import (
    CGrid_Velocity,
    Ux_Velocity,
    UxConstantFaceConstantZC,
    UxConstantFaceLinearZF,
    UxLinearNodeConstantZC,
    UxLinearNodeLinearZF,
    XConstantField,
    XLinear,
    XLinear_Velocity,
)

if TYPE_CHECKING:
    from parcels._core.basegrid import BaseGrid
    from parcels._typing import TimeLike

__all__ = ["FieldSet"]


class FieldSet:
    """FieldSet class that holds hydrodynamic data needed to execute particles.

    Parameters
    ----------
    fields : list of Field or VectorField
        Field and/or VectorField objects containing the field data.

    Notes
    -----
    The field data handed to the `from_*` constructors is an xarray.Dataset or uxarray.UxDataset object (`ds`).
    In xarray terminology, the (Ux)Dataset holds multiple (Ux)DataArray objects.
    Each (Ux)DataArray object is a single "field" that is associated with its own
    dimensions and coordinates within the (Ux)Dataset.

    A (Ux)Dataset object is associated with a single mesh, which can have multiple
    types of "points" (multiple "grids") (e.g. for UxDatasets, these are "face_lon",
    "face_lat", "node_lon", "node_lat", "edge_lon", "edge_lat"). Each (Ux)DataArray is
    registered to a specific set of points on the mesh.

    For UxDataset objects, each `UxDataArray.attrs` dictionary contains the
    necessary metadata to determine which set of points a field is registered
    to and what parent model the field is associated with. Parcels uses this metadata
    during execution for interpolation. Each `UxDataArray.attrs` dictionary
    must have:

    * a "location" key set to "face", "node", or "edge" to define which set of points a field is associated with.
    * a "mesh" key to define which parent model the fields are associated with (e.g. "fesom_mesh", "icon_mesh")
"""

    def __init__(self, fields: list[Field | VectorField]):
        for field in fields:
            if not isinstance(field, (Field, VectorField)):
                raise ValueError(f"Expected `field` to be a Field or VectorField object. Got {field}")

        assert_compatible_calendars(fields)

        self.fields = {f.name: f for f in fields}
        self.constants: dict[str, float] = {}

    def __getattr__(self, name):
        """Get the field by name. If the field is not found, check if it's a constant."""
        if name in self.fields:
            return self.fields[name]
        elif name in self.constants:
            return self.constants[name]
        else:
            raise AttributeError(f"FieldSet has no attribute '{name}'")

    def __repr__(self):
        return fieldset_repr(self)

    @property
    def time_interval(self):
        """Returns the valid executable time interval of the FieldSet,
        which is the intersection of the time intervals of all fields
        in the FieldSet.
        """
        time_intervals = (f.time_interval for f in self.fields.values())
        # Filter out Nones from constant Fields
        time_intervals = [t for t in time_intervals if t is not None]

        if len(time_intervals) == 0:  # All fields are constant fields
            return None

        return functools.reduce(lambda x, y: x.intersection(y), time_intervals)

    def add_field(self, field: Field, name: str | None = None):
        """Add a :class:`parcels.field.Field` object to the FieldSet.

        Parameters
        ----------
        field : parcels.field.Field
            Field object to be added
        name : str
            Name of the :class:`parcels.field.Field` object to be added. Defaults
            to name in Field object.
        """
        if not isinstance(field, (Field, VectorField)):
            raise ValueError(f"Expected `field` to be a Field or VectorField object. Got {type(field)}")
        assert_compatible_calendars((*self.fields.values(), field))

        name = field.name if name is None else name

        if name in self.fields:
            raise ValueError(f"FieldSet already has a Field with name '{name}'")
        self.fields[name] = field

    def add_constant_field(self, name: str, value, mesh: Mesh = "spherical"):
        """Wrapper function to add a Field that is constant in space,
        useful e.g. when using constant horizontal diffusivity.

        Parameters
        ----------
        name : str
            Name of the :class:`parcels.field.Field` object to be added
        value :
            Value of the constant field
        mesh : str
            String indicating the type of mesh coordinates:

            1. spherical (default): Lat and lon in degree, with a
               correction for zonal velocity U near the poles.
            2. flat: No conversion, lat/lon are assumed to be in m.
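
        Examples
        --------
        A minimal sketch, assuming an existing ``fieldset`` and a constant horizontal
        diffusivity of 10 m2/s (the field names here are illustrative only)::

            fieldset.add_constant_field("Kh_zonal", 10.0, mesh="spherical")
            fieldset.add_constant_field("Kh_meridional", 10.0, mesh="spherical")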
"""
        ds = xr.Dataset(
            {name: (["lat", "lon"], np.full((1, 1), value))},
            coords={"lat": (["lat"], [0], {"axis": "Y"}), "lon": (["lon"], [0], {"axis": "X"})},
        )
        xgrid = xgcm.Grid(
            ds, coords={"X": {"left": "lon"}, "Y": {"left": "lat"}}, autoparse_metadata=False, **_DEFAULT_XGCM_KWARGS
        )
        grid = XGrid(xgrid, mesh=mesh)
        self.add_field(Field(name, ds[name], grid, interp_method=XConstantField))

    def add_constant(self, name, value):
        """Add a constant to the FieldSet. Note that all constants are
        stored as 32-bit floats.

        Parameters
        ----------
        name : str
            Name of the constant
        value :
            Value of the constant (stored as 32-bit float)

        Examples
        --------
        Tutorials using fieldset.add_constant:

        `Analytical advection <../examples/tutorial_analyticaladvection.ipynb>`__

        `Diffusion <../examples/tutorial_diffusion.ipynb>`__

        `Periodic boundaries <../examples/tutorial_periodic_boundaries.ipynb>`__
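
        A minimal usage sketch (the constant name and value here are illustrative only)::

            fieldset.add_constant("max_depth", 2000.0)
            # afterwards available as fieldset.max_depth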
"""
        _assert_str_and_python_varname(name)
        if name in self.constants:
            raise ValueError(f"FieldSet already has a constant with name '{name}'")
        if not isinstance(value, (float, np.floating, int, np.integer)):
            raise ValueError(f"FieldSet constants have to be of type float or int, got a {type(value)}")
        self.constants[name] = np.float32(value)

    @property
    def gridset(self) -> list[BaseGrid]:
        grids: list[BaseGrid] = []
        for field in self.fields.values():
            if field.grid not in grids:
                grids.append(field.grid)
        return grids

    @classmethod
    def from_ugrid_conventions(cls, ds: ux.UxDataset, mesh: str = "spherical"):
        """Create a FieldSet from a Parcels-compliant uxarray.UxDataset.

        The main requirement for a UxDataset is the naming convention for the vertical grid dimensions & coordinates:

        zf - Name of coordinate and dimension for vertical positions at layer interfaces
        zc - Name of coordinate and dimension for vertical positions at layer centers

        Parameters
        ----------
        ds : uxarray.UxDataset
            uxarray.UxDataset as obtained from the uxarray package, but with appropriately named vertical dimensions

        Returns
        -------
        FieldSet
            FieldSet object containing the fields from the dataset that can be used for a Parcels simulation.
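
        Examples
        --------
        A minimal sketch, assuming ``ds`` can be made to follow the naming convention above
        (the file names and original dimension names are hypothetical)::

            import uxarray as ux

            ds = ux.open_dataset("grid.nc", "data.nc")
            ds = ds.rename({"interface_depth": "zf", "layer_depth": "zc"}).set_index(zf="zf", zc="zc")
            fieldset = FieldSet.from_ugrid_conventions(ds, mesh="spherical")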
"""
        ds_dims = list(ds.dims)
        if not all(dim in ds_dims for dim in ["time", "zf", "zc"]):
            raise ValueError(
                f"Dataset missing one of the required dimensions 'time', 'zf', or 'zc' for uxDataset. Found dimensions {ds_dims}"
            )

        grid = UxGrid(ds.uxgrid, z=ds.coords["zf"], mesh=mesh)

        ds = _discover_ux_U_and_V(ds)

        fields = {}
        if "U" in ds.data_vars and "V" in ds.data_vars:
            fields["U"] = Field("U", ds["U"], grid, _select_uxinterpolator(ds["U"]))
            fields["V"] = Field("V", ds["V"], grid, _select_uxinterpolator(ds["V"]))

            if "W" in ds.data_vars:
                fields["W"] = Field("W", ds["W"], grid, _select_uxinterpolator(ds["W"]))
                fields["UVW"] = VectorField(
                    "UVW", fields["U"], fields["V"], fields["W"], vector_interp_method=Ux_Velocity
                )
            else:
                fields["UV"] = VectorField("UV", fields["U"], fields["V"], vector_interp_method=Ux_Velocity)

        for varname in set(ds.data_vars) - set(fields.keys()):
            fields[varname] = Field(varname, ds[varname], grid, _select_uxinterpolator(ds[varname]))

        return cls(list(fields.values()))

    @classmethod
    def from_fesom2(cls, ds: ux.UxDataset, mesh: str = "spherical"):
        """Create a FieldSet from a FESOM2 uxarray.UxDataset.

        Parameters
        ----------
        ds : uxarray.UxDataset
            uxarray.UxDataset as obtained from the uxarray package.

        Returns
        -------
        FieldSet
            FieldSet object containing the fields from the dataset that can be used for a Parcels simulation.
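
        Examples
        --------
        A minimal sketch (the file paths are hypothetical)::

            import uxarray as ux

            ds = ux.open_mfdataset("fesom.mesh.diag.nc", ["u.nc", "v.nc", "w.nc"])
            fieldset = FieldSet.from_fesom2(ds, mesh="spherical")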
"""
        ds = ds.copy()
        ds_dims = list(ds.dims)
        if not all(dim in ds_dims for dim in ["time", "nz", "nz1"]):
            raise ValueError(
                f"Dataset missing one of the required dimensions 'time', 'nz', or 'nz1' for FESOM data. Found dimensions {ds_dims}"
            )

        ds = ds.rename(
            {
                "nz": "zf",  # Vertical Interface
                "nz1": "zc",  # Vertical Center
            }
        ).set_index(zf="zf", zc="zc")

        return FieldSet.from_ugrid_conventions(ds, mesh=mesh)

    @classmethod
    def from_icon(cls, ds: ux.UxDataset, mesh: str = "spherical"):
        """Create a FieldSet from an ICON uxarray.UxDataset.

        Parameters
        ----------
        ds : uxarray.UxDataset
            uxarray.UxDataset as obtained from the uxarray package.

        Returns
        -------
        FieldSet
            FieldSet object containing the fields from the dataset that can be used for a Parcels simulation.
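
        Examples
        --------
        A minimal sketch (the file paths are hypothetical)::

            import uxarray as ux

            ds = ux.open_dataset("icon_grid.nc", "icon_data.nc")
            fieldset = FieldSet.from_icon(ds, mesh="spherical")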
"""
        ds = ds.copy()
        ds_dims = list(ds.dims)
        if not all(dim in ds_dims for dim in ["time", "depth", "depth_2"]):
            raise ValueError(
                f"Dataset missing one of the required dimensions 'time', 'depth', or 'depth_2' for ICON data. Found dimensions {ds_dims}"
            )

        ds = ds.rename(
            {
                "depth_2": "zf",  # Vertical Interface
                "depth": "zc",  # Vertical Center
            }
        ).set_index(zf="zf", zc="zc")

        return FieldSet.from_ugrid_conventions(ds, mesh=mesh)

    @classmethod
    def from_sgrid_conventions(
        cls, ds: xr.Dataset, mesh: Mesh | None = None
    ):  # TODO: Update mesh to be discovered from the dataset metadata
        """Create a FieldSet from a dataset using SGRID convention metadata.

        This is the primary ingestion method in Parcels for structured grid datasets.
        Assumes that the U, V (and optionally W) variables are named 'U', 'V', and 'W' in the dataset.

        Parameters
        ----------
        ds : xarray.Dataset
            xarray.Dataset with SGRID convention metadata.
        mesh : str
            String indicating the type of mesh coordinates used during
            velocity interpolation. Options are "spherical" or "flat".

        Returns
        -------
        FieldSet
            FieldSet object containing the fields from the dataset that can be used for a Parcels simulation.

        Notes
        -----
        This method uses the SGRID convention metadata to parse the grid structure
        and create appropriate Fields for a Parcels simulation. The dataset should
        contain a variable with the 'cf_role' attribute set to 'grid_topology'.
        See https://sgrid.github.io/ for more information on the SGRID conventions.
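
        Examples
        --------
        A minimal sketch, assuming the dataset already carries SGRID grid topology metadata
        (the file path is hypothetical)::

            import xarray as xr

            ds = xr.open_dataset("model_output_sgrid.nc")
            fieldset = FieldSet.from_sgrid_conventions(ds)  # mesh type inferred from coordinate units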
"""
        ds = ds.copy()
        if mesh is None:
            mesh = _get_mesh_type_from_sgrid_dataset(ds)

        # Ensure time dimension has axis attribute if present
        if "time" in ds.dims and "time" in ds.coords:
            if "axis" not in ds["time"].attrs:
                logger.debug(
                    "Dataset contains 'time' dimension but no 'axis' attribute. Setting 'axis' attribute to 'T'."
                )
                ds["time"].attrs["axis"] = "T"

        # Find time dimension based on axis attribute and rename to `time`
        if (time_dims := ds.cf.axes.get("T")) is not None:
            if len(time_dims) > 1:
                raise ValueError("Multiple time coordinates found in dataset. This is not supported by Parcels.")
            (time_dim,) = time_dims
            if time_dim != "time":
                logger.debug(f"Renaming time axis coordinate from {time_dim} to 'time'.")
                ds = ds.rename({time_dim: "time"})

        # Parse SGRID metadata and get xgcm kwargs
        _, xgcm_kwargs = sgrid.parse_sgrid(ds)

        # Add time axis to xgcm_kwargs if present
        if "time" in ds.dims:
            if "T" not in xgcm_kwargs["coords"]:
                xgcm_kwargs["coords"]["T"] = {"center": "time"}

        if "lon" not in ds.coords or "lat" not in ds.coords:
            node_dimensions = sgrid.load_mappings(ds.grid.node_dimensions)
            ds["lon"] = ds[node_dimensions[0]]
            ds["lat"] = ds[node_dimensions[1]]

        # Create xgcm Grid object
        xgcm_grid = xgcm.Grid(ds, autoparse_metadata=False, **xgcm_kwargs, **_DEFAULT_XGCM_KWARGS)

        # Wrap in XGrid
        grid = XGrid(xgcm_grid, mesh=mesh)

        # Create fields from data variables, skipping grid metadata variables
        # Skip variables that are SGRID metadata (have cf_role='grid_topology')
        skip_vars = set()
        for var in ds.data_vars:
            if ds[var].attrs.get("cf_role") == "grid_topology":
                skip_vars.add(var)

        fields = {}
        if "U" in ds.data_vars and "V" in ds.data_vars:
            vector_interp_method = XLinear_Velocity if _is_agrid(ds) else CGrid_Velocity
            fields["U"] = Field("U", ds["U"], grid, XLinear)
            fields["V"] = Field("V", ds["V"], grid, XLinear)
            fields["UV"] = VectorField("UV", fields["U"], fields["V"], vector_interp_method=vector_interp_method)

            if "W" in ds.data_vars:
                fields["W"] = Field("W", ds["W"], grid, XLinear)
                fields["UVW"] = VectorField(
                    "UVW", fields["U"], fields["V"], fields["W"], vector_interp_method=vector_interp_method
                )

        for varname in set(ds.data_vars) - set(fields.keys()) - skip_vars:
            fields[varname] = Field(varname, ds[varname], grid, XLinear)

        return cls(list(fields.values()))


class CalendarError(Exception):  # TODO: Move to a parcels errors module
    """Exception raised when the calendar of a field is not compatible with the rest of the Fields.

    The user should ensure that they only add fields to a FieldSet that have compatible CFtime calendars.
    """


def assert_compatible_calendars(fields: Iterable[Field | VectorField]):
    time_intervals = [f.time_interval for f in fields if f.time_interval is not None]
    if len(time_intervals) == 0:  # All time intervals are None
        return

    reference_datetime_object = time_intervals[0].left
    for field in fields:
        if field.time_interval is None:
            continue
        if not datetime_is_compatible(reference_datetime_object, field.time_interval.left):
            msg = _format_calendar_error_message(field, reference_datetime_object)
            raise CalendarError(msg)


def _datetime_to_msg(example_datetime: TimeLike) -> str:
    datetime_type, calendar = get_datetime_type_calendar(example_datetime)
    msg = str(datetime_type)
    if calendar is not None:
        msg += f" with cftime calendar '{calendar}'"
    return msg


def _format_calendar_error_message(field: Field | VectorField, reference_datetime: TimeLike) -> str:
    assert field.time_interval is not None
    return f"Expected field {field.name!r} to have calendar compatible with datetime object {_datetime_to_msg(reference_datetime)}. Got field with calendar {_datetime_to_msg(field.time_interval.left)}. Have you considered using xarray to update the time dimension of the dataset to have a compatible calendar?"


_COPERNICUS_MARINE_AXIS_VARNAMES = {
    "X": "lon",
    "Y": "lat",
    "Z": "depth",
    "T": "time",
}

_COPERNICUS_MARINE_CF_STANDARD_NAME_FALLBACKS = {
    "UV": [
        (
            "eastward_sea_water_velocity",
            "northward_sea_water_velocity",
        ),  # GLOBAL_ANALYSISFORECAST_PHY_001_024, MEDSEA_ANALYSISFORECAST_PHY_006_013, BALTICSEA_ANALYSISFORECAST_PHY_003_006, BLKSEA_ANALYSISFORECAST_PHY_007_001, IBI_ANALYSISFORECAST_PHY_005_001, NWSHELF_ANALYSISFORECAST_PHY_004_013, MULTIOBS_GLO_PHY_MYNRT_015_003, MULTIOBS_GLO_PHY_W_3D_REP_015_007
        (
            "surface_geostrophic_eastward_sea_water_velocity",
            "surface_geostrophic_northward_sea_water_velocity",
        ),  # SEALEVEL_GLO_PHY_L4_MY_008_047, SEALEVEL_EUR_PHY_L4_NRT_008_060
        (
            "geostrophic_eastward_sea_water_velocity",
            "geostrophic_northward_sea_water_velocity",
        ),  # MULTIOBS_GLO_PHY_TSUV_3D_MYNRT_015_012
        (
            "sea_surface_wave_stokes_drift_x_velocity",
            "sea_surface_wave_stokes_drift_y_velocity",
        ),  # GLOBAL_ANALYSISFORECAST_WAV_001_027, MEDSEA_MULTIYEAR_WAV_006_012, ARCTIC_ANALYSIS_FORECAST_WAV_002_014, BLKSEA_ANALYSISFORECAST_WAV_007_003, IBI_ANALYSISFORECAST_WAV_005_005, NWSHELF_ANALYSISFORECAST_WAV_004_014
        ("sea_water_x_velocity", "sea_water_y_velocity"),  # ARCTIC_ANALYSISFORECAST_PHY_002_001
        (
            "eastward_sea_water_velocity_vertical_mean_over_pelagic_layer",
            "northward_sea_water_velocity_vertical_mean_over_pelagic_layer",
        ),  # GLOBAL_MULTIYEAR_BGC_001_033
    ],
    "W": ["upward_sea_water_velocity", "vertical_sea_water_velocity"],
}


# TODO v4: This function (i.e., discovering U and V from standard names) will be removed in future
# https://github.com/Parcels-code/Parcels/pull/2422#discussion_r2708467046
def _ds_rename_using_standard_names(ds: xr.Dataset | ux.UxDataset, name_dict: dict[str, str]) -> xr.Dataset:
    for standard_name, rename_to in name_dict.items():
        name = ds.cf[standard_name].name
        ds = ds.rename({name: rename_to})
        logger.info(
            f"cf_xarray found variable {name!r} with CF standard name {standard_name!r} in dataset, renamed it to {rename_to!r} for Parcels simulation."
        )
    return ds


def _discover_ux_U_and_V(ds: ux.UxDataset) -> ux.UxDataset:
    # Common variable names for U and V found in UxDatasets
    common_ux_UV = [("unod", "vnod"), ("u", "v")]
    common_ux_W = ["w"]

    if "W" not in ds:
        for common_W in common_ux_W:
            if common_W in ds:
                ds = _ds_rename_using_standard_names(ds, {common_W: "W"})
                break

    if "U" in ds and "V" in ds:
        return ds  # U and V already present
    elif "U" in ds or "V" in ds:
        raise ValueError(
            "Dataset has only one of the two variables 'U' and 'V'. Please rename the appropriate variable in your dataset to have both 'U' and 'V' for Parcels simulation."
        )

    for common_U, common_V in common_ux_UV:
        if common_U in ds:
            if common_V not in ds:
                raise ValueError(
                    f"Dataset has variable with standard name {common_U!r}, "
                    f"but not the matching variable with standard name {common_V!r}. "
                    "Please rename the appropriate variables in your dataset to have both 'U' and 'V' for Parcels simulation."
                )
            else:
                ds = _ds_rename_using_standard_names(ds, {common_U: "U", common_V: "V"})
                break
        else:
            if common_V in ds:
                raise ValueError(
                    f"Dataset has variable with standard name {common_V!r}, "
                    f"but not the matching variable with standard name {common_U!r}. "
                    "Please rename the appropriate variables in your dataset to have both 'U' and 'V' for Parcels simulation."
                )
            continue

    return ds


def _select_uxinterpolator(da: ux.UxDataArray):
    """Selects the appropriate uxarray interpolator for a given uxarray UxDataArray."""
    supported_uxinterp_mapping = {
        # (zc,n_face): face-center laterally, layer centers vertically - piecewise constant
        "zc,n_face": UxConstantFaceConstantZC,
        # (zc,n_node): node/corner laterally, layer centers vertically - barycentric lateral & piecewise constant vertical
        "zc,n_node": UxLinearNodeConstantZC,
        # (zf,n_node): node/corner laterally, layer interfaces vertically - barycentric lateral & linear vertical
        "zf,n_node": UxLinearNodeLinearZF,
        # (zf,n_face): face-center laterally, layer interfaces vertically - piecewise constant lateral & linear vertical
        "zf,n_face": UxConstantFaceLinearZF,
    }

    # Extract only spatial dimensions, neglecting time
    da_spatial_dims = tuple(d for d in da.dims if d not in ("time",))
    if len(da_spatial_dims) != 2:
        raise ValueError(
            "Fields on unstructured grids must have two spatial dimensions, one vertical (zf or zc) and one lateral (n_face, n_edge, or n_node)"
        )

    # Construct key (string) for mapping to interpolator
    # Find vertical and lateral tokens
    vdim = None
    ldim = None
    for d in da_spatial_dims:
        if d in ("zf", "zc"):
            vdim = d
        if d in ("n_face", "n_node"):
            ldim = d

    # Map to supported interpolators
    if vdim and ldim:
        key = f"{vdim},{ldim}"
        if key in supported_uxinterp_mapping:
            return supported_uxinterp_mapping[key]

    return None


# TODO: Refactor later into something like `parcels._metadata.discover(dataset)` helper that can be used to discover
# important metadata like this. I think this whole metadata handling should be refactored into its own module.
def _get_mesh_type_from_sgrid_dataset(ds_sgrid: xr.Dataset) -> Mesh:
    """Small helper to inspect SGRID metadata and dataset metadata to determine mesh type."""
    grid_da = sgrid.get_grid_topology(ds_sgrid)
    if grid_da is None:
        raise ValueError("Dataset does not contain SGRID grid topology metadata (cf_role='grid_topology').")
    sgrid_metadata = sgrid.parse_grid_attrs(grid_da.attrs)

    fpoint_x, fpoint_y = sgrid_metadata.node_coordinates

    if _is_coordinate_in_degrees(ds_sgrid[fpoint_x]) ^ _is_coordinate_in_degrees(ds_sgrid[fpoint_y]):
        msg = (
            f"Mismatch in units between X and Y coordinates.\n"
            f" Coordinate {ds_sgrid[fpoint_x]!r} attrs: {ds_sgrid[fpoint_x].attrs}\n"
            f" Coordinate {ds_sgrid[fpoint_y]!r} attrs: {ds_sgrid[fpoint_y].attrs}\n"
        )
        raise ValueError(msg)

    return "spherical" if _is_coordinate_in_degrees(ds_sgrid[fpoint_x]) else "flat"


def _is_agrid(ds: xr.Dataset) -> bool:
    # check if U and V are defined on the same dimensions
    # if yes, interpret as A grid
    return set(ds["U"].dims) == set(ds["V"].dims)


def _is_coordinate_in_degrees(da: xr.DataArray) -> bool:
    units = da.attrs.get("units")
    if units is None:
        raise ValueError(
            f"Coordinate {da.name!r} of your dataset has no 'units' attribute - we don't know what the spatial units are."
        )
    if isinstance(units, str) and "degree" in units.lower():
        return True
    return False