Skip to content

Commit 77e9a38

Browse files
committed
Merge remote-tracking branch 'origin/add_attributes'
2 parents 99c3af6 + e277bae commit 77e9a38

File tree

1 file changed: +160 −8 lines changed

xarray_sentinel/sentinel1.py

+160-8
Original file line numberDiff line numberDiff line change
@@ -77,7 +77,10 @@ def open_calibration_dataset(
7777
calibration_vectors = esa_safe.parse_tag_as_list(
7878
calibration, ".//calibrationVector", "calibration"
7979
)
80-
80+
cal_attrs = esa_safe.parse_tag(
81+
calibration, ".//calibrationInformation", "calibration"
82+
)
83+
attrs["absolute_calibration_constant"] = cal_attrs["absoluteCalibrationConstant"]
8184
azimuth_time_list = []
8285
pixel_list = []
8386
line_list = []
@@ -108,8 +111,8 @@ def open_calibration_dataset(
108111
)
109112
data_vars = {
110113
"azimuth_time": ("line", [np.datetime64(dt, "ns") for dt in azimuth_time_list]),
111-
"sigmaNought": (("line", "pixel"), sigmaNought_list),
112-
"betaNought": (("line", "pixel"), betaNought_list),
114+
"sigma_nought": (("line", "pixel"), sigmaNought_list),
115+
"beta_nought": (("line", "pixel"), betaNought_list),
113116
"gamma": (("line", "pixel"), gamma_list),
114117
"dn": (("line", "pixel"), dn_list),
115118
}
@@ -118,6 +121,147 @@ def open_calibration_dataset(
118121
return xr.Dataset(data_vars=data_vars, coords=coords, attrs=attrs)
119122

120123

124+
def open_reference_replica_dataset(
    annotation_path: esa_safe.PathType, attrs: Dict[str, Any] = {}
) -> xr.Dataset:
    """Read the reference replica information from an annotation file.

    :param annotation_path: path to the product annotation XML file
    :param attrs: extra attributes to attach to the returned dataset
    :return: dataset with the reference replica amplitude / phase polynomial
        coefficients along a ``degree`` coordinate; replica metadata
        (azimuth time, chirp source, PG source, time delay, gain) is stored
        in the dataset attributes
    """
    reference_replica = esa_safe.parse_tag_as_list(
        annotation_path, ".//replicaInformationList/replicaInformation/referenceReplica"
    )[0]
    # copy before updating: updating the argument in place would leak state
    # into the shared `{}` default (and into the caller's dict) across calls
    attrs = dict(attrs)
    attrs.update(
        {
            "azimuth_time": reference_replica["azimuthTime"],
            "chirp_source": reference_replica["chirpSource"],
            "pg_source": reference_replica["pgSource"],
            "time_delay": reference_replica["timeDelay"],
            "gain": reference_replica["gain"],
        }
    )

    # the coefficients are stored as a single space-separated string under "$"
    reference_replica_amplitude_coefficients = [
        float(v) for v in reference_replica["amplitudeCoefficients"]["$"].split()
    ]
    reference_replica_phase_coefficients = [
        float(v) for v in reference_replica["phaseCoefficients"]["$"].split()
    ]

    coords: Dict[str, Any] = {
        "degree": range(len(reference_replica_amplitude_coefficients))
    }
    data_vars = {
        "reference_replica_amplitude_coefficients": (
            "degree",
            reference_replica_amplitude_coefficients,
        ),
        "reference_replica_phase_coefficients": (
            "degree",
            reference_replica_phase_coefficients,
        ),
    }
    return xr.Dataset(data_vars=data_vars, coords=coords, attrs=attrs)
154+
155+
156+
def open_antenna_pattern(
    annotation_path: esa_safe.PathType, attrs: Dict[str, Any] = {}
) -> xr.Dataset:
    """Read the antenna pattern records from an annotation file.

    :param annotation_path: path to the product annotation XML file
    :param attrs: extra attributes to attach to the returned dataset
    :return: dataset with elevation angle, incidence angle, terrain height
        and roll on an ``azimuth_time`` x ``slant_range_time`` grid
    :raises ValueError: if the slant range time axis is not identical for
        all antenna pattern records (no regular grid can be built)
    """
    antenna_pattern_list = esa_safe.parse_tag_as_list(
        annotation_path, ".//antennaPattern/antennaPatternList/antennaPattern"
    )

    slant_range_time_list = []
    azimuth_time_list = []
    elevation_angle_list = []
    elevation_pattern_list = []
    incidence_angle_list = []
    terrain_height_list = []
    roll_list = []

    for vector in antenna_pattern_list:
        azimuth_time_list.append(vector["azimuthTime"])
        slant_range_time = np.fromstring(
            vector["slantRangeTime"]["$"], dtype=np.float32, sep=" "
        )
        slant_range_time_list.append(slant_range_time)

        elevation_angle = np.fromstring(
            vector["elevationAngle"]["$"], dtype=np.float32, sep=" "
        )
        elevation_angle_list.append(elevation_angle)

        elevation_pattern = np.fromstring(
            vector["elevationPattern"]["$"], dtype=np.float32, sep=" "
        )
        # values are interleaved (real, imaginary) pairs — TODO confirm pair
        # order against the product specification (was `[::2] * 1j + [1::2]`,
        # i.e. imaginary first, which looks like an inversion)
        elevation_pattern_list.append(
            elevation_pattern[::2] + 1j * elevation_pattern[1::2]
        )

        # BUGFIX: was reading "slantRangeTime" again (copy-paste error)
        incidence_angle = np.fromstring(
            vector["incidenceAngle"]["$"], dtype=np.float32, sep=" "
        )
        incidence_angle_list.append(incidence_angle)

        terrain_height_list.append(vector["terrainHeight"])
        roll_list.append(vector["roll"])

    slant_range_time_array = np.array(slant_range_time_list)
    # all records must share the same slant range time axis to form a grid
    if (slant_range_time_array - slant_range_time_array[0]).any():
        raise ValueError(
            "Unable to organise antenna pattern vectors in a regular grid"
        )
    data_vars = {
        "elevation_angle": (
            ("azimuth_time", "slant_range_time"),
            np.array(elevation_angle_list),
        ),
        # "elevation_pattern": (
        #     ("azimuth_time", "slant_range_time"),
        #     np.array(elevation_pattern_list),
        # ),
        "incidence_angle": (
            ("azimuth_time", "slant_range_time"),
            np.array(incidence_angle_list),
        ),
        "terrain_height": ("azimuth_time", terrain_height_list),
        "roll": ("azimuth_time", roll_list),
    }
    coords = {
        "slant_range_time": slant_range_time_array[0],
        "azimuth_time": [np.datetime64(dt, "ns") for dt in azimuth_time_list],
    }
    return xr.Dataset(data_vars=data_vars, coords=coords, attrs=attrs)
208+
209+
210+
def open_replica_dataset(
    annotation_path: esa_safe.PathType, attrs: Dict[str, Any] = {}
) -> xr.Dataset:
    """Read the replica quality records from an annotation file.

    :param annotation_path: path to the product annotation XML file
    :param attrs: extra attributes to attach to the returned dataset
    :return: dataset with one variable per replica quality field, all along
        the ``azimuth_time`` coordinate
    """
    replica_list = esa_safe.parse_tag_as_list(
        annotation_path,
        ".//replicaInformationList/replicaInformation/replicaList/replica",
    )
    # output variable name -> annotation tag name; a single mapping replaces
    # eleven hand-maintained parallel append-lists
    fields = {
        "cross_correlation_bandwidth": "crossCorrelationBandwidth",
        "cross_correlation_pslr": "crossCorrelationPslr",
        "cross_correlation_peak_location": "crossCorrelationPeakLocation",
        "reconstructed_replica_valid_flag": "reconstructedReplicaValidFlag",
        "pg_product_amplitude": "pgProductAmplitude",
        "pg_product_phase": "pgProductPhase",
        "model_pg_product_amplitude": "modelPgProductAmplitude",
        "model_pg_product_phase": "modelPgProductPhase",
        "relative_pg_product_valid_flag": "relativePgProductValidFlag",
        "absolute_pg_product_valid_flag": "absolutePgProductValidFlag",
        "internal_time_delay": "internalTimeDelay",
    }

    coords: Dict[str, Any] = {
        "azimuth_time": [
            np.datetime64(replica["azimuthTime"], "ns") for replica in replica_list
        ],
    }
    data_vars = {
        name: ("azimuth_time", [replica[tag] for replica in replica_list])
        for name, tag in fields.items()
    }
    return xr.Dataset(data_vars=data_vars, coords=coords, attrs=attrs)
263+
264+
121265
def open_noise_range_dataset(
122266
noise: esa_safe.PathType, attrs: Dict[str, Any] = {}
123267
) -> xr.Dataset:
@@ -158,21 +302,21 @@ def open_noise_azimuth_dataset(
158302

159303
first_range_sample = []
160304
line_list = []
161-
noiseAzimuthLut_list = []
305+
noise_azimuth_lut_list = []
162306
for vector in noise_vectors:
163307
first_range_sample.append(vector["firstRangeSample"])
164308
line = np.fromstring(vector["line"]["$"], dtype=int, sep=" ")
165309
line_list.append(line)
166-
noiseAzimuthLut = np.fromstring(
310+
noise_azimuth_lut = np.fromstring(
167311
vector["noiseAzimuthLut"]["$"], dtype=np.float32, sep=" "
168312
)
169-
noiseAzimuthLut_list.append(noiseAzimuthLut)
313+
noise_azimuth_lut_list.append(noise_azimuth_lut)
170314

171-
# BROKEN: GRDs have line and noiseAzimuthLut of different size, we take the first one
315+
# BROKEN: GRDs have line and noise_azimuth_lut of different size, we take the first one
172316
data_vars = {}
173317
coords = {}
174318
if first_range_sample:
175-
data_vars["noiseAzimuthLut"] = ("line", noiseAzimuthLut_list[0])
319+
data_vars["noise_azimuth_lut"] = ("line", noise_azimuth_lut_list[0])
176320
coords["line"] = line_list[0]
177321

178322
return xr.Dataset(data_vars=data_vars, coords=coords, attrs=attrs)
@@ -529,7 +673,12 @@ def find_available_groups(
529673
"azimuth_fm_rate",
530674
"dc_estimate",
531675
"gcp",
676+
"replica",
677+
"reference_replica",
678+
"antenna_pattern"
532679
]:
680+
if product_type == "GRD" and metadata_group == "antenna_pattern":
681+
continue
533682
groups[f"{swath_pol_group}/{metadata_group}"] = [abspath]
534683
if product_type == "GRD":
535684
groups[f"{swath_pol_group}/coordinate_conversion"] = [abspath]
@@ -947,6 +1096,9 @@ def ground_range_to_slant_range_time(
9471096
"calibration": open_calibration_dataset,
9481097
"noise_range": open_noise_range_dataset,
9491098
"noise_azimuth": open_noise_azimuth_dataset,
1099+
"replica": open_replica_dataset,
1100+
"reference_replica": open_reference_replica_dataset,
1101+
"antenna_pattern": open_antenna_pattern,
9501102
}
9511103

9521104

0 commit comments

Comments
 (0)