     QgsProcessingException
 )
 from .g_s_defaults import (
+    annotation_field_name,
     def_tables_dict,
+    def_sections_dict,
     annotation_field_name
 )
 
 
 
 
+def get_annotations_from_raw_df(df_raw):
+    """
+    extracts annotations / descriptions from the dataframe
+    :param pd.DataFrame df_raw
+    :return: dict
+    """
+    if annotation_field_name in df_raw.columns:
+        annot_dict = {k: v for k, v in zip(df_raw['Name'], df_raw[annotation_field_name])}
+        annot_dict = {k: v for k, v in annot_dict.items() if pd.notna(v)}
+        annot_dict = {k: v for k, v in annot_dict.items() if len(v) > 0}
+    else:
+        annot_dict = {}
+    return annot_dict
+
+
 
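
As a quick reference, here is a minimal standalone sketch of what the new helper is intended to return; the column name 'Annotation' is only a stand-in for whatever annotation_field_name resolves to in g_s_defaults, and the sample values are invented:

import pandas as pd

annotation_field_name = 'Annotation'  # stand-in for the value imported from g_s_defaults

df_raw = pd.DataFrame({
    'Name': ['J1', 'J2', 'J3'],
    annotation_field_name: ['manhole at main street', None, ''],
})

# same three filtering steps as get_annotations_from_raw_df above
annot_dict = {k: v for k, v in zip(df_raw['Name'], df_raw[annotation_field_name])}
annot_dict = {k: v for k, v in annot_dict.items() if pd.notna(v)}  # drops the None entry
annot_dict = {k: v for k, v in annot_dict.items() if len(v) > 0}   # drops the empty string

print(annot_dict)  # {'J1': 'manhole at main street'}
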
 def data_preparation(data_name, data_entry, export_params):
     """
@@ -50,14 +67,15 @@ def data_preparation(data_name, data_entry, export_params):
     if data_name == 'OPTIONS':
         from .g_s_options import get_options_from_table
         options_df, main_infiltration_method = get_options_from_table(
-            data_entry['data']['OPTIONS'].copy()
+            data_entry['OPTIONS'].copy()
         )
         export_params['main_infiltration_method'] = main_infiltration_method
         return {'OPTIONS': {'data': options_df}}
-    elif data_name == 'SUBCATCHMENTS':
+
+    elif data_name == 'SUBCATCHMENTS':
         from .g_s_subcatchments import get_subcatchments_from_layer
         subcatchments_df, subareas_df, infiltration_df = get_subcatchments_from_layer(
-            data_entry['data']['SUBCATCHMENTS'].copy(),
+            data_entry.copy(),
             export_params['main_infiltration_method']
         )
         return {
@@ -69,7 +87,7 @@ def data_preparation(data_name, data_entry, export_params):
     elif data_name == 'CONDUITS':
         from .g_s_links import get_conduits_from_shapefile
         conduits_df, xsections_df, losses_df = get_conduits_from_shapefile(
-            data_entry['data']['CONDUITS'].copy()
+            data_entry.copy()
         )
         return {
             'CONDUITS': {'data': conduits_df},
@@ -79,70 +97,71 @@ def data_preparation(data_name, data_entry, export_params):
 
     elif data_name == 'PUMPS':
         from .g_s_links import get_pumps_from_shapefile
-        pumps_df = get_pumps_from_shapefile(data_entry['data']['PUMPS'].copy())
+        pumps_df = get_pumps_from_shapefile(data_entry.copy())
         return {'PUMPS': {'data': pumps_df}}
 
     elif data_name == 'WEIRS':
         from .g_s_links import get_weirs_from_shapefile
-        weirs_df, xsections_df = get_weirs_from_shapefile(data_entry['data']['WEIRS'].copy())
+        weirs_df, xsections_df = get_weirs_from_shapefile(data_entry.copy())
         return {
             'WEIRS': {'data': weirs_df},
             'XSECTIONS': {'data': xsections_df}
         }
 
     elif data_name == 'OUTLETS':
         from .g_s_links import get_outlets_from_shapefile
-        outlets_df = get_outlets_from_shapefile(data_entry['data']['OUTLETS'].copy())
+        outlets_df = get_outlets_from_shapefile(data_entry.copy())
         return {'OUTLETS': {'data': outlets_df}}
 
     elif data_name == 'ORIFICES':
         from .g_s_links import get_orifices_from_shapefile
-        orifices_df, xsections_df = get_orifices_from_shapefile(data_entry['data']['ORIFICES'].copy())
+        orifices_df, xsections_df = get_orifices_from_shapefile(data_entry.copy())
         return {
             'ORIFICES': {'data': orifices_df},
             'XSECTIONS': {'data': xsections_df}
         }
 
     elif data_name == 'JUNCTIONS':
-        from .g_s_nodes import get_junctions_from_shapefile
-        junctions_df = get_junctions_from_shapefile(data_entry['data']['JUNCTIONS'].copy())
+        from .g_s_nodes import get_junctions_from_layer
+        junctions_df = get_junctions_from_layer(data_entry.copy())
         return {'JUNCTIONS': {'data': junctions_df}}
 
     elif data_name == 'OUTFALLS':
         from .g_s_nodes import get_outfalls_from_shapefile
-        outfalls_df = get_outfalls_from_shapefile(data_entry['data']['OUTFALLS'].copy())
+        outfalls_df = get_outfalls_from_shapefile(data_entry.copy())
         return {'OUTFALLS': {'data': outfalls_df}}
 
-    elif data_name == 'STORAGES':
-        from .g_s_nodes import get_storages_from_shapefile
-        storages_df = get_storages_from_shapefile(data_entry['data']['STORAGES'].copy())
-        return {'STORAGES': {'data': storages_df}}
+    elif data_name == 'STORAGE':
+        from .g_s_nodes import get_storages_from_layer
+        storages_df = get_storages_from_layer(data_entry.copy())
+        return {'STORAGE': {'data': storages_df}}
 
     elif data_name == 'DIVIDERS':
-        from .g_s_nodes import get_dividers_from_shapefile
-        dividers_df = get_dividers_from_shapefile(data_entry['data']['DIVIDERS'].copy())
+        from .g_s_nodes import get_dividers_from_layer
+        dividers_df = get_dividers_from_layer(data_entry.copy())
         return {'DIVIDERS': {'data': dividers_df}}
-
+
+    #######
     elif data_name == 'CURVES':
         from .g_s_export_helpers import get_curves_from_table
         curves_dict = get_curves_from_table(
-            data_entry['data']['CURVES'],
+            data_entry['CURVES'],
             name_col='Name'
         )
         return {'CURVES': {'data': curves_dict}}
 
     elif data_name == 'PATTERNS':
         from .g_s_export_helpers import get_patterns_from_table
         patterns_dict = get_patterns_from_table(
-            data_entry['data']['PATTERNS'],
+            data_entry['PATTERNS'],
            name_col='Name'
        )
        return {'PATTERNS': {'data': patterns_dict}}
 
     elif data_name == 'TIMESERIES':
         from .g_s_export_helpers import get_timeseries_from_table
         timeseries_dict = get_timeseries_from_table(
-            data_entry['data']['TIMESERIES'],
+            data_entry['TIMESERIES'],
             name_col='Name',
             feedback=export_params['feedback']
         )
@@ -151,17 +170,28 @@ def data_preparation(data_name, data_entry, export_params):
     elif data_name == 'INFLOWS':
         from .g_s_nodes import get_inflows_from_table
         inflows_dict = get_inflows_from_table(
-            data_entry['data']['INFLOWS'],
+            data_entry['INFLOWS'],
             all_nodes,
             feedback=export_params['feedback']
         )
         return {'INFLOWS': {'data': inflows_dict}}
 
     elif data_name == 'QUALITY':
         from .g_s_quality import get_quality_params_from_table
-        quality_df = get_quality_params_from_table(data_entry['data']['QUALITY'].copy())
+        quality_df = get_quality_params_from_table(data_entry['QUALITY'].copy())
         return {'QUALITY': {'data': quality_df}}
 
+    elif data_name == 'RAINGAGES':
+        from .g_s_subcatchments import get_raingage_from_qgis_row
+        rg_features_df = data_entry.copy()
+        rg_features_df = rg_features_df.apply(
+            lambda x: get_raingage_from_qgis_row(x),
+            axis=1
+        )
+        rg_inp_cols = def_sections_dict['RAINGAGES']
+        rg_features_df = rg_features_df[rg_inp_cols]  # drop unnecessary
+        return {'RAINGAGES': {'data': rg_features_df}}
+
     else:
         raise QgsProcessingException(f'Unknown data name: {data_name}')
 
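
A small, self-contained illustration of the return contract the branches above share: each one hands back a {'<SECTION>': {'data': ...}} mapping (some branches contribute two sections, e.g. CONDUITS also yields XSECTIONS), so per-section results can be merged by a caller. The frames below are invented placeholders, not the plugin's real data:

import pandas as pd

prepared_sections = [
    {'JUNCTIONS': {'data': pd.DataFrame({'Name': ['J1'], 'Elevation': [101.3]})}},
    {'CONDUITS': {'data': pd.DataFrame({'Name': ['C1']})},
     'XSECTIONS': {'data': pd.DataFrame({'Name': ['C1'], 'Shape': ['CIRCULAR']})}},
]

inp_dict = {}
for section_result in prepared_sections:
    inp_dict.update(section_result)  # merge every per-section result into one export dict

print(sorted(inp_dict.keys()))  # ['CONDUITS', 'JUNCTIONS', 'XSECTIONS']
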
@@ -175,33 +205,40 @@ def use_z_if_available(
         coords,
         use_z_bool,
         feedback,
-        geom_type='Points',
+        link_offsets='elevation',
         layer_name=None
 ):
     """
-    replaces Elevation or InOffset/OutOffset by Z_Coords
+    replaces Elevation or InOffset/OutOffset by Z_Coords and removes Z_Coords from coords dict
     :param pd.DataFrame df
-    :param pd.DataFrame / dict coords
+    :param dict coords
     :param bool use_z_bool
     :param QgsProcessingFeedback feedback
+    :param str link_offsets
+    :param str layer_name
     """
-    if geom_type == 'lines':
+    if list(coords.keys())[0] == 'VERTICES':  # lines
+        coords_dict = coords['VERTICES']['data']
         if use_z_bool:
             # if not na
-            df['InOffset'] = [coords[l_name]['Z_Coord'].tolist()[0] for l_name in df['Name']]
-            df['OutOffset'] = [coords[l_name]['Z_Coord'].tolist()[-1] for l_name in df['Name']]
-            coords = {
-                l_name: df_coord[
+            df['InOffset'] = [coords_dict[l_name]['Z_Coord'].tolist()[0] for l_name in df['Name']]
+            df['OutOffset'] = [coords_dict[l_name]['Z_Coord'].tolist()[-1] for l_name in df['Name']]
+            coords_dict_without_z = {
+                l_name: df_i[
                     ['X_Coord', 'Y_Coord']
-                ] for l_name, df_coord in coords.items()
+                ] for l_name, df_i in coords_dict.items()
             }  # remove z
-    else:
+            coords['VERTICES']['data'] = coords_dict_without_z
+    else:  # points -> pd.DataFrame
+        coords_df = coords['COORDINATES']['data']
         if use_z_bool:
             # if not na
-            df['Elevation'] = coords['Z_Coord']
-            coords.drop("Z_Coord", axis=1, inplace=True)
+            df['Elevation'] = coords_df['Z_Coord']
+            coords_df_without_z = coords_df.drop("Z_Coord", axis=1, inplace=False)
+            coords['COORDINATES']['data'] = coords_df_without_z
     return df, coords
 
+
 def get_coords_from_geometry(df):
     """
     extracts coords from any gpd.geodataframe
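
To make the reworked coords argument concrete, here is a runnable sketch of the lines branch of use_z_if_available, with use_z_bool assumed True and the new link_offsets argument not exercised: the container carries a single 'VERTICES' key, the first and last Z_Coord of each link fill InOffset/OutOffset, and Z_Coord is then stripped from the vertex frames. All values are invented:

import pandas as pd

df = pd.DataFrame({'Name': ['C1'], 'InOffset': [0.0], 'OutOffset': [0.0]})
coords = {'VERTICES': {'data': {
    'C1': pd.DataFrame({
        'X_Coord': [0.0, 10.0],
        'Y_Coord': [0.0, 5.0],
        'Z_Coord': [101.2, 100.4],
    }),
}}}

coords_dict = coords['VERTICES']['data']
df['InOffset'] = [coords_dict[n]['Z_Coord'].tolist()[0] for n in df['Name']]    # 101.2
df['OutOffset'] = [coords_dict[n]['Z_Coord'].tolist()[-1] for n in df['Name']]  # 100.4
coords['VERTICES']['data'] = {
    n: d[['X_Coord', 'Y_Coord']] for n, d in coords_dict.items()  # drop Z_Coord
}

print(df)
print(coords['VERTICES']['data']['C1'].columns.tolist())  # ['X_Coord', 'Y_Coord']
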
@@ -258,23 +295,35 @@ def get_coords_from_geometry(df):
                 ['Name', 'X_Coord', 'Y_Coord', 'Z_Coord']
             )
         )
-        return extr_coords_df
+        return {'COORDINATES': {'data': extr_coords_df}}
 
     # case lines
     elif all(
         QgsWkbTypes.displayString(
             g_type.wkbType()
         ) in line_t_names for g_type in df.geometry
     ):
-        return {na: extract_xy_from_line(line_geom) for line_geom, na in zip(df.geometry, df.Name)}
+        extracted_vertices = {
+            na: extract_xy_from_line(line_geom) for line_geom, na in zip(
+                df.geometry,
+                df.Name
+            )
+        }
+        return {'VERTICES': {'data': extracted_vertices}}
 
     # case polygons
     elif all(
         QgsWkbTypes.displayString(
             g_type.wkbType()
         ) in polygon_t_names for g_type in df.geometry
     ):
-        return {na: extract_xy_from_area(polyg_geom) for polyg_geom, na in zip(df.geometry, df.Name)}
+        extracted_vertices = {
+            na: extract_xy_from_area(polyg_geom) for polyg_geom, na in zip(
+                df.geometry,
+                df.Name
+            )
+        }
+        return {'POLYGONS': {'data': extracted_vertices}}
     else:
         raise QgsProcessingException(
             'Geometry type of one or more features could not be handled'
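
For quick orientation, the reshaped return values of get_coords_from_geometry now look roughly as sketched below (toy values): point layers yield one DataFrame under 'COORDINATES', line and polygon layers yield per-feature vertex frames under 'VERTICES' and 'POLYGONS', and downstream code can branch on that single top-level key.

import pandas as pd

point_result = {'COORDINATES': {'data': pd.DataFrame(
    {'Name': ['J1'], 'X_Coord': [3.5], 'Y_Coord': [7.1], 'Z_Coord': [101.3]}
)}}
line_result = {'VERTICES': {'data': {
    'C1': pd.DataFrame({'X_Coord': [0.0, 10.0], 'Y_Coord': [0.0, 5.0]})
}}}
polygon_result = {'POLYGONS': {'data': {
    'S1': pd.DataFrame({'X_Coord': [0.0, 10.0, 10.0], 'Y_Coord': [0.0, 0.0, 5.0]})
}}}

for result in (point_result, line_result, polygon_result):
    section_key = list(result.keys())[0]
    print(section_key, type(result[section_key]['data']).__name__)
# COORDINATES DataFrame
# VERTICES dict
# POLYGONS dict
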