Skip to content

Commit e0b89c4

Browse files
mjreno
authored and committed
support structured lat/lon
1 parent c8f1587 commit e0b89c4

File tree

8 files changed

+300
-222
lines changed

8 files changed

+300
-222
lines changed

.docs/Notebooks/mf6_netcdf01_tutorial.py

Lines changed: 21 additions & 21 deletions
Original file line number | Diff line number | Diff line change
@@ -164,10 +164,10 @@
164164

165165
# ## Create NetCDF based simulation method 2
166166
#
167-
# In this method we will set the FloPy `netcdf` argument to `nofile`
168-
# when `write_simulation()` is called. As such, FloPy will not generate
169-
# the NetCDF file automatically. We will manage the NetCDF file
170-
# generation ourselves in method 2.
167+
# In this method we will set the FloPy `netcdf` argument to `` (empty
168+
# string) when `write_simulation()` is called. As such, FloPy will
169+
# not generate the NetCDF file automatically. We will manage the NetCDF
170+
# file generation ourselves in method 2.
171171
#
172172
# Reset the simulation path and set the `GWF` name file `nc_filerecord`
173173
# attribute to the name of the intended input NetCDF file. Display
@@ -188,7 +188,7 @@
188188
gwf.name_file.nc_filerecord = "uzf01.structured.nc"
189189
# write simulation with ASCII inputs tagged for NetCDF
190190
# but do not create NetCDF file
191-
sim.write_simulation(netcdf="nofile")
191+
sim.write_simulation(netcdf="")
192192

193193
# ## Show name file with NetCDF input configured
194194

@@ -223,7 +223,7 @@
223223
# details related to creating the variable, including dimensions,
224224
# shape and data type.
225225
#
226-
# The dictionaries are available via the `netcdf_info()` call. Their
226+
# The dictionaries are available via the `netcdf_meta()` call. Their
227227
# content also varies depending on the desired type of dataset (i.e.
228228
# `structured` or `layered`). In this step we will access the model
229229
# NetCDF metadata and use it to update dataset scoped attributes.
@@ -238,12 +238,12 @@
238238
# Examples of this approach (with package objects) are shown below.
239239

240240
# get model netcdf info
241-
nc_info = gwf.netcdf_info()
242-
pprint(nc_info)
241+
nc_meta = gwf.netcdf_meta()
242+
pprint(nc_meta)
243243

244244
# update dataset directly with required attributes
245-
for a in nc_info["attrs"]:
246-
ds.attrs[a] = nc_info["attrs"][a]
245+
for a in nc_meta["attrs"]:
246+
ds.attrs[a] = nc_meta["attrs"][a]
247247

248248
# ## Update the dataset with supported `DIS` arrays
249249
#
@@ -260,16 +260,16 @@
260260
# ## Access `NPF` package NetCDF attributes
261261
#
262262
# Access package scoped NetCDF details by storing the dictionary returned
263-
# from `netcdf_info()`.
263+
# from `netcdf_meta()`.
264264
#
265265
# The contents of the info dictionary are shown and then, in the following
266266
# step, the dictionary and the dataset are passed to a helper routine that
267267
# creates the intended array variables.
268268

269269
# get npf package netcdf info
270270
npf = gwf.get_package("npf")
271-
nc_info = npf.netcdf_info()
272-
pprint(nc_info)
271+
nc_meta = npf.netcdf_meta()
272+
pprint(nc_meta)
273273

274274
# ## Update package NetCDF metadata dictionary and dataset
275275
#
@@ -284,9 +284,9 @@
284284
# values.
285285

286286
# update dataset with `NPF` arrays
287-
nc_info["k"]["varname"] = "npf_k_updated"
288-
nc_info["k"]["attrs"]["standard_name"] = "soil_hydraulic_conductivity_at_saturation"
289-
ds = npf.update_dataset(ds, netcdf_info=nc_info)
287+
nc_meta["k"]["varname"] = "npf_k_updated"
288+
nc_meta["k"]["attrs"]["standard_name"] = "soil_hydraulic_conductivity_at_saturation"
289+
ds = npf.update_dataset(ds, netcdf_meta=nc_meta)
290290

291291
# ## Show dataset `NPF K` parameter with updates
292292

@@ -335,7 +335,7 @@
335335
# ## Method 3: DIY with xarray
336336
#
337337
# The above method still uses FloPy objects to update the dataset arrays
338-
# to values consistent with the state of the objects. The `netcdf_info`
338+
# to values consistent with the state of the objects. The `netcdf_meta`
339339
# dictionary is intended to support creation of the dataset without
340340
# an existing simulation defined. The base dataset can be defined with
341341
# `modelgrid` and `modeltime` objects, while the full package netcdf
@@ -345,16 +345,16 @@
345345

346346
# ## Demonstrate static call on MFPackage (structured dataset):
347347

348-
netcdf_info = flopy.mf6.mfpackage.MFPackage.netcdf_package(
348+
netcdf_meta = flopy.mf6.mfpackage.MFPackage.netcdf_package(
349349
mtype="GWF",
350350
ptype="NPF",
351351
auxiliary=["CONCENTRATION"],
352352
)
353-
pprint(netcdf_info)
353+
pprint(netcdf_meta)
354354

355355
# ## Demonstrate static call on MFPackage (layered dataset):
356356

357-
netcdf_info = flopy.mf6.mfpackage.MFPackage.netcdf_package(
357+
netcdf_meta = flopy.mf6.mfpackage.MFPackage.netcdf_package(
358358
mtype="GWF", ptype="NPF", mesh="LAYERED", auxiliary=["CONCENTRATION"], nlay=2
359359
)
360-
pprint(netcdf_info)
360+
pprint(netcdf_meta)

autotest/regression/test_model_netcdf.py

Lines changed: 15 additions & 15 deletions
Original file line number | Diff line number | Diff line change
@@ -87,7 +87,7 @@ def check_netcdf(path, mobj, mesh=None):
8787
@pytest.mark.regression
8888
def test_uzf01_model_scope_nofile(function_tmpdir, example_data_path):
8989
sim_name = "uzf01"
90-
netcdf = "nofile"
90+
netcdf = ""
9191
fname = f"{sim_name}.structured.nc"
9292
data_path_base = example_data_path / "mf6" / "netcdf"
9393
ws = function_tmpdir / sim_name
@@ -108,7 +108,7 @@ def test_uzf01_model_scope_nofile(function_tmpdir, example_data_path):
108108
@pytest.mark.regression
109109
def test_uzf02_model_scope_nofile(function_tmpdir, example_data_path):
110110
sim_name = "uzf02"
111-
netcdf = "nofile"
111+
netcdf = ""
112112
fname = f"{sim_name}.input.nc" # default
113113
data_path_base = example_data_path / "mf6" / "netcdf"
114114
ws = function_tmpdir / sim_name
@@ -230,7 +230,7 @@ def test_uzf02_sim_scope_fname(function_tmpdir, example_data_path):
230230
@pytest.mark.regression
231231
def test_uzf01_model_scope_nomesh(function_tmpdir, example_data_path):
232232
sim_name = "uzf01"
233-
netcdf = "nofile"
233+
netcdf = ""
234234
fname = f"{sim_name}.structured.nc"
235235
data_path_base = example_data_path / "mf6" / "netcdf"
236236
ws = function_tmpdir / sim_name
@@ -258,7 +258,7 @@ def test_uzf01_model_scope_nomesh(function_tmpdir, example_data_path):
258258
@pytest.mark.regression
259259
def test_uzf01_model_scope_mesh(function_tmpdir, example_data_path):
260260
sim_name = "uzf01"
261-
netcdf = "nofile"
261+
netcdf = ""
262262
mesh = "layered"
263263
fname = f"{sim_name}.layered.nc"
264264
data_path_base = example_data_path / "mf6" / "netcdf"
@@ -287,7 +287,7 @@ def test_uzf01_model_scope_mesh(function_tmpdir, example_data_path):
287287
@pytest.mark.regression
288288
def test_uzf02_model_scope(function_tmpdir, example_data_path):
289289
sim_name = "uzf02"
290-
netcdf = "nofile"
290+
netcdf = ""
291291
mesh = "layered"
292292
fname = f"{sim_name}.layered.nc"
293293
data_path_base = example_data_path / "mf6" / "netcdf"
@@ -335,11 +335,11 @@ def test_uzf01_pkg_scope(function_tmpdir, example_data_path):
335335
ds = gwf.modelgrid.dataset(modeltime=gwf.modeltime)
336336

337337
# get model netcdf info
338-
nc_info = gwf.netcdf_info()
338+
nc_meta = gwf.netcdf_meta()
339339

340340
# update dataset directly with required attributes
341-
for a in nc_info["attrs"]:
342-
ds.attrs[a] = nc_info["attrs"][a]
341+
for a in nc_meta["attrs"]:
342+
ds.attrs[a] = nc_meta["attrs"][a]
343343

344344
# add all packages and update data
345345
for p in gwf.packagelist:
@@ -373,25 +373,25 @@ def test_uzf01_pkg_scope_modify(function_tmpdir, example_data_path):
373373
ds = gwf.modelgrid.dataset(modeltime=gwf.modeltime)
374374

375375
# get model netcdf info
376-
nc_info = gwf.netcdf_info()
376+
nc_meta = gwf.netcdf_meta()
377377

378378
# update dataset directly with required attributes
379-
for a in nc_info["attrs"]:
380-
ds.attrs[a] = nc_info["attrs"][a]
379+
for a in nc_meta["attrs"]:
380+
ds.attrs[a] = nc_meta["attrs"][a]
381381

382382
# update dataset with `DIS` arrays
383383
dis = gwf.get_package("dis")
384384
ds = dis.update_dataset(ds)
385385

386386
# get npf package netcdf info
387387
npf = gwf.get_package("npf")
388-
nc_info = npf.netcdf_info()
388+
nc_meta = npf.netcdf_meta()
389389

390390
# update dataset with `NPF` arrays
391391
# change k varname and add attribute
392-
nc_info["k"]["varname"] = "npf_k_updated"
393-
nc_info["k"]["attrs"]["standard_name"] = "soil_hydraulic_conductivity_at_saturation"
394-
ds = npf.update_dataset(ds, netcdf_info=nc_info)
392+
nc_meta["k"]["varname"] = "npf_k_updated"
393+
nc_meta["k"]["attrs"]["standard_name"] = "soil_hydraulic_conductivity_at_saturation"
394+
ds = npf.update_dataset(ds, netcdf_meta=nc_meta)
395395

396396
# ic
397397
ic = gwf.get_package("ic")

flopy/discretization/grid.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -1288,15 +1288,15 @@ def write_shapefile(self, filename="grid.shp", crs=None, prjfile=None, **kwargs)
12881288
)
12891289
return
12901290

1291-
def dataset(self, modeltime=None, mesh=None, encoding=None):
1291+
def dataset(self, modeltime=None, mesh=None, configuration=None):
12921292
"""
12931293
Method to generate baseline xarray dataset
12941294
12951295
Parameters
12961296
----------
12971297
modeltime : FloPy ModelTime object
12981298
mesh : mesh type
1299-
encoding : variable encoding dictionary
1299+
configuration : configuration dictionary
13001300
13011301
Returns
13021302
-------

flopy/discretization/structuredgrid.py

Lines changed: 91 additions & 28 deletions
Original file line number | Diff line number | Diff line change
@@ -1770,12 +1770,12 @@ def get_plottable_layer_array(self, a, layer):
17701770
assert plotarray.shape == required_shape, msg
17711771
return plotarray
17721772

1773-
def dataset(self, modeltime=None, mesh=None, encoding=None):
1773+
def dataset(self, modeltime=None, mesh=None, configuration=None):
17741774
"""
17751775
modeltime : FloPy ModelTime object
17761776
mesh : mesh type
17771777
valid mesh types are "layered" or None
1778-
encoding : variable encoding dictionary
1778+
configuration : configuration dictionary
17791779
"""
17801780
from ..utils import import_optional_dependency
17811781

@@ -1788,11 +1788,11 @@ def dataset(self, modeltime=None, mesh=None, encoding=None):
17881788
ds.attrs["modflow_grid"] = "STRUCTURED"
17891789

17901790
if mesh and mesh.upper() == "LAYERED":
1791-
return self._layered_mesh_dataset(ds, modeltime, encoding)
1791+
return self._layered_mesh_dataset(ds, modeltime, configuration)
17921792
elif mesh is None:
1793-
return self._structured_dataset(ds, modeltime, encoding)
1793+
return self._structured_dataset(ds, modeltime, configuration)
17941794

1795-
def _layered_mesh_dataset(self, ds, modeltime=None, encoding=None):
1795+
def _layered_mesh_dataset(self, ds, modeltime=None, configuration=None):
17961796
FILLNA_INT32 = np.int32(-2147483647)
17971797
FILLNA_DBL = 9.96920996838687e36
17981798
lenunits = {0: "m", 1: "ft", 2: "m", 3: "m"}
@@ -1895,29 +1895,31 @@ def _layered_mesh_dataset(self, ds, modeltime=None, encoding=None):
18951895
ds["mesh_face_nodes"].attrs["_FillValue"] = FILLNA_INT32
18961896
ds["mesh_face_nodes"].attrs["start_index"] = np.int32(1)
18971897

1898-
if encoding is not None and "wkt" in encoding and encoding["wkt"] is not None:
1899-
ds = ds.assign({"projection": ([], np.int32(1))})
1900-
# wkt override to existing crs
1901-
ds["projection"].attrs["wkt"] = encoding["wkt"]
1898+
wkt_configured = (
1899+
configuration is not None
1900+
and "wkt" in configuration
1901+
and configuration["wkt"] is not None
1902+
)
1903+
1904+
if wkt_configured or self.crs is not None:
19021905
ds["mesh_node_x"].attrs["grid_mapping"] = "projection"
19031906
ds["mesh_node_y"].attrs["grid_mapping"] = "projection"
19041907
ds["mesh_face_x"].attrs["grid_mapping"] = "projection"
19051908
ds["mesh_face_y"].attrs["grid_mapping"] = "projection"
1906-
elif self.crs is not None:
19071909
ds = ds.assign({"projection": ([], np.int32(1))})
1908-
ds["projection"].attrs["wkt"] = self.crs.to_wkt()
1909-
ds["mesh_node_x"].attrs["grid_mapping"] = "projection"
1910-
ds["mesh_node_y"].attrs["grid_mapping"] = "projection"
1911-
ds["mesh_face_x"].attrs["grid_mapping"] = "projection"
1912-
ds["mesh_face_y"].attrs["grid_mapping"] = "projection"
1910+
if wkt_configured:
1911+
# wkt override to existing crs
1912+
ds["projection"].attrs["wkt"] = configuration["wkt"]
1913+
else:
1914+
ds["projection"].attrs["wkt"] = self.crs.to_wkt()
19131915

19141916
return ds
19151917

1916-
def _structured_dataset(self, ds, modeltime=None, encoding=None):
1918+
def _structured_dataset(self, ds, modeltime=None, configuration=None):
19171919
lenunits = {0: "m", 1: "ft", 2: "m", 3: "m"}
19181920

1919-
x = self.xoffset + self.xycenters[0]
1920-
y = self.yoffset + self.xycenters[1]
1921+
xc = self.xoffset + self.xycenters[0]
1922+
yc = self.yoffset + self.xycenters[1]
19211923
z = [float(x) for x in range(1, self.nlay + 1)]
19221924

19231925
# set coordinate var bounds
@@ -1943,8 +1945,8 @@ def _structured_dataset(self, ds, modeltime=None, encoding=None):
19431945
var_d = {
19441946
"time": (["time"], modeltime.totim),
19451947
"z": (["z"], z),
1946-
"y": (["y"], y),
1947-
"x": (["x"], x),
1948+
"y": (["y"], yc),
1949+
"x": (["x"], xc),
19481950
}
19491951
ds = ds.assign(var_d)
19501952

@@ -1970,17 +1972,78 @@ def _structured_dataset(self, ds, modeltime=None, encoding=None):
19701972
ds["x"].attrs["long_name"] = "Easting"
19711973
ds["x"].attrs["bounds"] = "x_bnds"
19721974

1973-
if encoding is not None and "wkt" in encoding and encoding["wkt"] is not None:
1974-
ds = ds.assign({"projection": ([], np.int32(1))})
1975-
# wkt override to existing crs
1976-
ds["projection"].attrs["crs_wkt"] = encoding["wkt"]
1975+
latlon_cfg = (
1976+
configuration is not None
1977+
and "latitude" in configuration
1978+
and configuration["latitude"] is not None
1979+
and "longitude" in configuration
1980+
and configuration["longitude"] is not None
1981+
)
1982+
1983+
if latlon_cfg or self.crs is not None:
1984+
if latlon_cfg:
1985+
lats = configuration["latitude"]
1986+
lons = configuration["longitude"]
1987+
else:
1988+
try:
1989+
import warnings
1990+
1991+
from pyproj import Proj
1992+
1993+
epsg_code = self.crs.to_epsg(min_confidence=90)
1994+
proj = Proj(
1995+
f"EPSG:{epsg_code}",
1996+
)
1997+
1998+
lats = []
1999+
lons = []
2000+
x_local = []
2001+
y_local = []
2002+
for y in self.xycenters[1]:
2003+
for x in self.xycenters[0]:
2004+
x_local.append(x)
2005+
y_local.append(y)
2006+
2007+
x_global, y_global = self.get_coords(x_local, y_local)
2008+
2009+
for i, x in enumerate(x_global):
2010+
lon, lat = proj(x, y_global[i], inverse=True)
2011+
lats.append(lat)
2012+
lons.append(lon)
2013+
2014+
lats = np.array(lats)
2015+
lons = np.array(lons)
2016+
2017+
except Exception as e:
2018+
warnings.warn(
2019+
f"Cannot create coordinates from CRS: {e}",
2020+
UserWarning,
2021+
)
2022+
2023+
# create coordinate vars
2024+
var_d = {
2025+
"lat": (["y", "x"], lats.reshape(yc.size, xc.size)),
2026+
"lon": (["y", "x"], lons.reshape(yc.size, xc.size)),
2027+
}
2028+
ds = ds.assign(var_d)
2029+
2030+
# set coordinate attributes
2031+
ds["lat"].attrs["units"] = "degrees_north"
2032+
ds["lat"].attrs["standard_name"] = "latitude"
2033+
ds["lat"].attrs["long_name"] = "latitude"
2034+
ds["lon"].attrs["units"] = "degrees_east"
2035+
ds["lon"].attrs["standard_name"] = "longitude"
2036+
ds["lon"].attrs["long_name"] = "longitude"
2037+
2038+
elif (
2039+
configuration is not None
2040+
and "wkt" in configuration
2041+
and configuration["wkt"] is not None
2042+
):
19772043
ds["x"].attrs["grid_mapping"] = "projection"
19782044
ds["y"].attrs["grid_mapping"] = "projection"
1979-
elif self.crs is not None:
19802045
ds = ds.assign({"projection": ([], np.int32(1))})
1981-
ds["projection"].attrs["crs_wkt"] = self.crs.to_wkt()
1982-
ds["x"].attrs["grid_mapping"] = "projection"
1983-
ds["y"].attrs["grid_mapping"] = "projection"
2046+
ds["projection"].attrs["crs_wkt"] = configuration["wkt"]
19842047

19852048
return ds
19862049

0 commit comments

Comments
 (0)