Merge pull request #144 from agrouaze/fix_pol_in_calibration_LUT
make the raw calibration LUTs polarization dependent
agrouaze authored Mar 1, 2023
2 parents 671dfe8 + 972fae8 commit c7a4872
Showing 2 changed files with 25 additions and 12 deletions.
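
Note: the core of the change is that Sentinel1Meta.get_calibration_luts no longer returns the LUTs of the first calibration file only; it loops over every calibration annotation file, reads its raw LUTs, and concatenates them along a new 'pol' dimension. Below is a minimal, self-contained sketch of that xr.concat pattern with toy LUT datasets (variable names and array contents are illustrative only, not real calibration values):

    import numpy as np
    import pandas as pd
    import xarray as xr

    # one toy "raw LUT" dataset per polarization file (stand-in for the real luts_raw datasets)
    per_pol = []
    pols = []
    for pol in ['VV', 'VH']:
        ds = xr.Dataset(
            {'sigma0_lut': (('line', 'sample'), np.full((2, 3), 1.0)),
             'gamma0_lut': (('line', 'sample'), np.full((2, 3), 2.0))},
            coords={'line': [0, 100], 'sample': [0, 10, 20]})
        per_pol.append(ds)
        pols.append(pol)

    # concatenate along a new 'pol' dimension, as the patched get_calibration_luts does
    luts = xr.concat(per_pol, pd.Index(pols, name='pol'))

    # downstream code can then select the LUT for one polarization
    print(luts.sel(pol='VH')['sigma0_lut'].shape)  # (2, 3)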
src/xsar/sentinel1_meta.py: 27 changes (20 additions & 7 deletions)
@@ -944,28 +944,39 @@ def get_annotation_definitions(self):

     def get_calibration_luts(self):
         """
         get original (ie not interpolation) xr.Dataset sigma0 and gamma0 Look Up Tables to apply calibration
         """
         #sigma0_lut = self.xml_parser.get_var(self.files['calibration'].iloc[0], 'calibration.sigma0_lut',describe=True)
-        luts = self.xml_parser.get_compound_var(self.files['calibration'].iloc[0],'luts_raw')
-        return luts
+        pols = []
+        tmp = []
+        for pol_code, xml_file in self.files['calibration'].items():
+            luts_ds = self.xml_parser.get_compound_var(xml_file,'luts_raw')
+            pol = os.path.basename(xml_file).split('-')[4].upper()
+            pols.append(pol)
+            tmp.append(luts_ds)
+        ds = xr.concat(tmp, pd.Index(pols, name="pol"))
+        # ds.attrs = {'description':
+        #             'original (ie not interpolation) xr.Dataset sigma0 and gamma0 Look Up Tables'}
+        return ds

     def get_noise_azi_raw(self):
         tmp = []
         pols = []
         for pol_code, xml_file in self.files['noise'].items():
             #pol = self.files['polarization'].cat.categories[pol_code-1]
-            pol = os.path.basename(xml_file).split('-')[4].lower()
+            pol = os.path.basename(xml_file).split('-')[4].upper()
             pols.append(pol)
             if self.product == 'SLC':
                 noise_lut_azi_raw_ds = self.xml_parser.get_compound_var(xml_file,'noise_lut_azi_raw_slc')
             else:
                 noise_lut_azi_raw_ds = self.xml_parser.get_compound_var(xml_file, 'noise_lut_azi_raw_grd')
             for vari in noise_lut_azi_raw_ds:
-                if 'noiseLut_' in vari:
+                if 'noise_lut' in vari:
                     varitmp = 'noiseLut'
                     hihi = self.xml_parser.get_var(self.files['noise'].iloc[0], 'noise.azi.%s' % varitmp,
                                                    describe=True)
-                elif vari == 'noiseLut' and self.product=='WV': #WV case
+                elif vari == 'noise_lut' and self.product=='WV': #WV case
                     hihi = 'dummy variable, noise is not defined in azimuth for WV acquisitions'
                 else:
                     varitmp = vari
@@ -982,11 +993,13 @@ def get_noise_range_raw(self):
         pols = []
         for pol_code, xml_file in self.files['noise'].items():
             #pol = self.files['polarization'].cat.categories[pol_code - 1]
-            pol = os.path.basename(xml_file).split('-')[4].lower()
+            pol = os.path.basename(xml_file).split('-')[4].upper()
             pols.append(pol)
             noise_lut_range_raw_ds = self.xml_parser.get_compound_var(xml_file, 'noise_lut_range_raw')
             for vari in noise_lut_range_raw_ds:
-                hihi = self.xml_parser.get_var(self.files['noise'].iloc[0], 'noise.range.%s' % vari,
+                if 'noise_lut' in vari:
+                    varitmp = 'noiseLut'
+                hihi = self.xml_parser.get_var(self.files['noise'].iloc[0], 'noise.range.%s' % varitmp,
                                                describe=True)
                 noise_lut_range_raw_ds[vari].attrs['description'] = hihi
             tmp.append(noise_lut_range_raw_ds)
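Both get_noise_*_raw methods and get_calibration_luts now take the polarization from the calibration/noise annotation file name and upper-case it, so the 'pol' labels match the upper-case convention used for the pol coordinate. A small sketch of that extraction; the file name below is made up but follows the usual dash-separated Sentinel-1 pattern and is only an illustration:

    import os

    # illustrative Sentinel-1 calibration annotation file path (fields separated by dashes)
    xml_file = ('/tmp/SAFE/annotation/calibration/'
                'calibration-s1a-iw-grd-vv-20230301t000000-20230301t000025-047474-05b32c-001.xml')

    # field 4 of the dash-separated basename is the polarization
    pol = os.path.basename(xml_file).split('-')[4].upper()
    print(pol)  # VV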
src/xsar/sentinel1_xml_mappings.py: 10 changes (5 additions & 5 deletions)
@@ -363,7 +363,7 @@ def noise_lut_range_raw(lines, samples, noiseLuts):
         normalized_noise_luts.append(noiseLuts[uu][0:minimum_pts])
         normalized_samples.append(samples[uu][0:minimum_pts])
     tmp_noise = np.stack(normalized_noise_luts)
-    ds['noiseLut'] = xr.DataArray(tmp_noise,
+    ds['noise_lut'] = xr.DataArray(tmp_noise,
                                   coords={'line': lines, 'sample': samples[0][0:minimum_pts]},
                                   dims=['line', 'sample'])
     # ds['sample'] = xr.DataArray(np.stack(normalized_samples), coords={'lines': lines, 'sample_index': np.arange(minimum_pts)},
@@ -376,7 +376,7 @@ def noise_lut_azi_raw_grd(line_azi,line_azi_start,line_azi_stop,
                           sample_azi_start, sample_azi_stop, noise_azi_lut, swath):
     ds = xr.Dataset()
     for ii, swathi in enumerate(swath): # with 2018 data the noise vector are not the same size -> stacking impossible
-        ds['noiseLut_%s' % swathi] = xr.DataArray(noise_azi_lut[ii], coords={'line': line_azi[ii]}, dims=['line'])
+        ds['noise_lut_%s' % swathi] = xr.DataArray(noise_azi_lut[ii], coords={'line': line_azi[ii]}, dims=['line'])

     # ds['noiseLut'] = xr.DataArray(np.stack(noise_azi_lut).T, coords={'line_index': np.arange(len(line_azi[0])), 'swath': swath},
     #                               dims=['line_index', 'swath'])
@@ -394,18 +394,18 @@ def noise_lut_azi_raw_slc(line_azi,line_azi_start,line_azi_stop,
     ds = xr.Dataset()
     #if 'WV' in mode: # there is no noise in azimuth for WV acquisitions
     if swath == []: #WV SLC case
-        ds['noiseLut'] = xr.DataArray(1.) # set noise_azimuth to one to make post steps like noise_azi*noise_range always possible
+        ds['noise_lut'] = xr.DataArray(1.) # set noise_azimuth to one to make post steps like noise_azi*noise_range always possible
         ds['line_start'] = xr.DataArray(line_azi_start, attrs={'swath': swath})
         ds['line_stop'] = xr.DataArray(line_azi_stop, attrs={'swath': swath})
         ds['sample_start'] = xr.DataArray(sample_azi_start, attrs={'swath': swath})
         ds['sample_stop'] = xr.DataArray(sample_azi_stop, attrs={'swath': swath})
     else:
-        ds['noiseLut'] = xr.DataArray(noise_azi_lut[0], coords={'line': line_azi[0]}, dims=['line']) # only on subswath opened
+        ds['noise_lut'] = xr.DataArray(noise_azi_lut[0], coords={'line': line_azi[0]}, dims=['line']) # only on subswath opened
         ds['line_start'] = xr.DataArray(line_azi_start[0], attrs={'swath': swath})
         ds['line_stop'] = xr.DataArray(line_azi_stop[0], attrs={'swath': swath})
         ds['sample_start'] = xr.DataArray(sample_azi_start[0], attrs={'swath': swath})
         ds['sample_stop'] = xr.DataArray(sample_azi_stop[0], attrs={'swath': swath})
-    # ds['noiseLut'] = xr.DataArray(np.stack(noise_azi_lut).T, coords={'line_index': np.arange(len(line_azi[0])), 'swath': swath},
+    # ds['noise_lut'] = xr.DataArray(np.stack(noise_azi_lut).T, coords={'line_index': np.arange(len(line_azi[0])), 'swath': swath},
     #                               dims=['line_index', 'swath'])
     # ds['line'] = xr.DataArray(np.stack(line_azi).T, coords={'line_index': np.arange(len(line_azi[0])), 'swath': swath},
     #                           dims=['line_index', 'swath'])
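In the WV SLC branch the azimuth LUT is stored as the scalar xr.DataArray(1.); as the inline comment notes, this keeps later steps like noise_azi * noise_range valid, because a scalar DataArray broadcasts against the range LUT. A toy illustration of that broadcasting (the noise values are arbitrary, not real figures):

    import numpy as np
    import xarray as xr

    noise_azi = xr.DataArray(1.)  # WV case: no azimuth noise vector, scalar placeholder
    noise_range = xr.DataArray(np.linspace(2.0, 4.0, 5),
                               coords={'sample': np.arange(5)}, dims=['sample'])

    total = noise_azi * noise_range  # scalar broadcasts over the 'sample' dimension
    print(total.dims, total.values)  # ('sample',) [2.  2.5 3.  3.5 4. ]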
