Merge pull request #204 from umr-lops/rangenoisepatchMarch2024
Patch to get correct line coordinates in the range noise dataset for SLC products
agrouaze authored May 14, 2024
2 parents 8bb1511 + 9411321 commit 9ea17e7
Showing 4 changed files with 77 additions and 30 deletions.
22 changes: 16 additions & 6 deletions README.md
@@ -3,7 +3,18 @@

Synthetic Aperture Radar (SAR) Level-1 GRD python mapper for efficient xarray/dask based processing


This Python library allows applying different operations to SAR images, such as:
- calibration
- de-noising
- re-sampling

The library works the same whether the product is a **Sentinel-1**, a **RadarSAT-2**, or an **RCM** product.

The library provides variables such as `longitude`, `latitude`, `incidence_angle`, or `sigma0` at the native product resolution or at a coarser resolution.

The library performs resampling suited to GRD (i.e. ground-projected) SAR images. The same method is used for WV SLC products, and the approximation can be considered still valid because a WV image is only 20 km x 20 km.

For TOPS (IW or EW) SLC products, however, we recommend using [xsarslc](/~https://github.com/umr-lops/xsar_slc.git).
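
A minimal usage sketch (the product path below is a placeholder for any supported SAFE product):

```python
import xsar

# open a Sentinel-1 GRD product, resampled to a coarser 1 km grid
ds = xsar.open_dataset('S1A_IW_GRDH_1SDV_xxx.SAFE', resolution='1000m')

# geometry and radiometry variables are exposed as xarray DataArrays
sigma0 = ds['sigma0']
lon, lat = ds['longitude'], ds['latitude']
```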

# Install

@@ -12,7 +23,7 @@ Synthetic Aperture Radar (SAR) Level-1 GRD python mapper for efficient xarray/da
1) Install `xsar` (without the readers)

For a faster installation and fewer conflicts between packages, it is better
to make the installation with `mamba`
to make the installation with `micromamba`

```bash
conda install -c conda-forge mamba
@@ -21,14 +32,14 @@ conda install -c conda-forge mamba
2) install `xsar` (without the readers)

```bash
mamba install -c conda-forge xsar
micromamba install -c conda-forge xsar
```
3) Add optional dependencies

- Add use of Radarsat-2 :

```bash
mamba install -c conda-forge xradarsat2
micromamba install -c conda-forge xradarsat2
```

- Add use of RCM (RadarSat Constellation Mission)
@@ -40,7 +51,7 @@ pip install xarray-safe-rcm
- Add use of Sentinel-1

```bash
mamba install -c conda-forge xarray-safe-s1
micromamba install -c conda-forge xarray-safe-s1
```

## Pypi
@@ -125,4 +136,3 @@ Attributes: (12/14)
# More information

For more install options and to use xsar, see [documentation](https://cyclobs.ifremer.fr/static/sarwing_datarmor/xsar/)

23 changes: 14 additions & 9 deletions docs/examples/projections.ipynb
@@ -146,7 +146,7 @@
"source": [
"sigma0_image = gv.Image(sigma0_proj.sel(pol='VV')).opts(alpha=0.3, cmap='gray', clim=(0,0.05))\n",
"#(gv.tile_sources.Wikipedia * gv.feature.land.options(scale='50m') * sigma0_image).opts(width=600,height=600)\n",
"(gv.tile_sources.Wikipedia * sigma0_image * gv.feature.coastline.options(scale='10m')).opts(width=600,height=600)"
"(gv.tile_sources.EsriImagery * sigma0_image * gv.feature.coastline.options(scale='10m')).opts(width=600,height=600)"
]
},
{
@@ -170,7 +170,9 @@
},
"outputs": [],
"source": [
"sigma0_proj.sel(pol='VV').rio.to_raster('/tmp/sigma0_nocolor.tiff')"
"s0vvproj = sigma0_proj.sel(pol='VV')\n",
"s0vvproj.attrs['name'] = 'sigma0 VV projected using rioxarray'\n",
"s0vvproj.rio.to_raster('/tmp/sigma0_nocolor.tiff')"
]
},
{
@@ -195,7 +197,7 @@
"tmp = rioxarray.open_rasterio('/tmp/sigma0_nocolor.tiff')\n",
"tmp = tmp.rename('sigma0_nocolor')\n",
"tmp2 = gv.util.from_xarray(tmp)\n",
"gv.tile_sources.Wikipedia * gv.Image(tmp2,vdims='sigma0_nocolor',kdims=['x','y']).opts(alpha=0.7, cmap='gray', clim=(0,0.05),colorbar=True,width=500,height=200)"
"gv.tile_sources.EsriImagery * gv.Image(tmp2,kdims=['x','y']).opts(alpha=0.7, cmap='gray', clim=(0,0.05),colorbar=True,width=500,height=200) # vdims='sigma0_nocolor'"
]
},
{
@@ -250,7 +252,9 @@
"rgb_sigma0.name = 'sigma0_rgba'\n",
"xsar_obj.datatree['measurement'] = xsar_obj.datatree['measurement'].assign(xr.merge([xsar_obj.dataset,rgb_sigma0]))\n",
"xsar_obj.recompute_attrs()\n",
"xsar_obj.dataset['sigma0_rgba'].rio.reproject('epsg:4326',shape=(1000,1000),nodata=0).rio.to_raster('/tmp/sigma0_color.tiff')"
"s0rgba = xsar_obj.dataset['sigma0_rgba'].rio.reproject('epsg:4326',shape=(1000,1000),nodata=0)\n",
"s0rgba.attrs['name'] = 'sigma0 RGBA projected using rioxarray'\n",
"s0rgba.rio.to_raster('/tmp/sigma0_color.tiff')"
]
},
{
@@ -265,11 +269,12 @@
"# there is a transparency bug in geoviews (/~https://github.com/holoviz/geoviews/issues/571)\n",
"# but if loading this tiff in google earth, it should render properly\n",
"tmp = rioxarray.open_rasterio('/tmp/sigma0_color.tiff')\n",
"tmp = tmp.rename('sigma0_color').isel(band=0)\n",
"tmp2 = gv.util.from_xarray(tmp)\n",
"print(tmp2)\n",
"tmp = tmp.rename('sigma0_color').isel(band=1)\n",
"print(tmp)\n",
"# tmp2 = gv.util.from_xarray(tmp,vdims='sigma0_color')\n",
"# print('empt2\\n====================',tmp2)\n",
"# (gv.tile_sources.Wikipedia * gv.load_tiff('/tmp/sigma0_color.tiff')).opts(width=600,height=600)\n",
"(gv.tile_sources.Wikipedia * gv.Image(tmp2,vdims='sigma0_color',kdims=['x','y']).opts(cmap='magma',colorbar=True)).opts(width=600,height=600)"
"(gv.tile_sources.EsriImagery * gv.Image(tmp,vdims='sigma0_color',kdims=['x','y']).opts(cmap='magma',colorbar=True,tools=['hover'])).opts(width=600,height=600) # "
]
},
{
@@ -390,7 +395,7 @@
"source": [
"merged_lonlat = merged_grid.rio.reproject(4326)\n",
"(\n",
" gv.tile_sources.Wikipedia * gv.Image(merged_lonlat['spd']).opts(cmap='jet', alpha=0.3) \n",
" gv.tile_sources.EsriImagery * gv.Image(merged_lonlat['spd']).opts(cmap='jet', alpha=0.3) \n",
" * gv.Image(merged_lonlat['sigma0']).opts(cmap='gray', clim=(0,0.05), alpha=0.7)\n",
").opts(width=600,height=600)\n",
" "
8 changes: 4 additions & 4 deletions docs/examples/xsar_tops_slc.ipynb
@@ -83,7 +83,7 @@
"#gv.tile_sources.Wikipedia*gv.Polygons(multids.bursts(only_valid_location=True)['geometry']).opts(width=800,height=450,alpha=0.5)\n",
"import pandas as pd\n",
"tmp = pd.concat([xsar.Sentinel1Dataset(onesubswath).get_bursts_polygons(only_valid_location=True) for onesubswath in multids.subdatasets.index])\n",
"gv.tile_sources.Wikipedia*gv.Polygons(tmp['geometry']).opts(width=800,height=450,alpha=0.5)\n"
"gv.tile_sources.EsriImagery*gv.Polygons(tmp['geometry']).opts(width=800,height=450,alpha=0.5)\n"
]
},
{
@@ -94,7 +94,7 @@
"outputs": [],
"source": [
"tmp2 = pd.concat([xsar.Sentinel1Dataset(onesubswath).get_bursts_polygons(only_valid_location=False) for onesubswath in multids.subdatasets.index])\n",
"gv.tile_sources.Wikipedia*gv.Polygons(tmp2['geometry']).opts(width=800,height=450,alpha=0.2)*gv.Polygons(tmp['geometry']).opts(width=800,height=450,alpha=0.2)\n"
"gv.tile_sources.EsriImagery*gv.Polygons(tmp2['geometry']).opts(width=800,height=450,alpha=0.2)*gv.Polygons(tmp['geometry']).opts(width=800,height=450,alpha=0.2)\n"
]
},
{
@@ -261,7 +261,7 @@
"metadata": {},
"outputs": [],
"source": [
"s1ds.apply_calibration_and_denoising()\n",
"s1ds.apply_calibration_and_denoising() \n",
"s1ds.dataset"
]
},
@@ -428,7 +428,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.8"
"version": "3.9.15"
},
"toc-autonumbering": true
},
54 changes: 43 additions & 11 deletions src/xsar/sentinel1_dataset.py
@@ -99,7 +99,6 @@ def __init__(self, dataset_id, resolution=None,
# default dtypes
if dtypes is not None:
self._dtypes.update(dtypes)

# default meta for map_blocks output.
# as asarray is imported from numpy, it's a numpy array.
# but if later we decide to import asarray from cupy, il will be a cupy.array (gpu)
@@ -285,15 +284,19 @@ def __init__(self, dataset_id, resolution=None,
self.datatree.attrs.update(self.sar_meta.to_dict("all"))

# load land_mask by default for GRD products

self.add_high_resolution_variables(
patch_variable=patch_variable, luts=luts, lazy_loading=lazyloading)
if 'GRD' in str(self.datatree.attrs['product']):
self.add_high_resolution_variables(
patch_variable=patch_variable, luts=luts, lazy_loading=lazyloading)
if self.apply_recalibration:
self.select_gains()
self.apply_calibration_and_denoising()

# added 6 Feb 23, to fill empty attrs
self.datatree['measurement'].attrs = self.datatree.attrs
if self.sar_meta.product == 'SLC' and 'WV' not in self.sar_meta.swath: # TOPS cases
tmp = self.corrected_range_noise_lut(self.datatree)
self.datatree['noise_range'].ds = tmp # the corrected noise_range dataset should now contain an attribute 'corrected_range_noise_lut'
self.sliced = False
"""True if dataset is a slice of original L1 dataset"""

@@ -303,6 +306,35 @@ def __init__(self, dataset_id, resolution=None,
# save original bbox
self._bbox_coords_ori = self._bbox_coords

def corrected_range_noise_lut(self, dt):
"""
Patch from F. Nouguier, see https://jira-projects.cls.fr/browse/MPCS-3581 and /~https://github.com/umr-lops/xsar_slc/issues/175
Return the range noise lut with corrected line numbering. This function should be used only on the full SLC dataset dt.
Args:
dt (xarray.datatree) : datatree returned by xsar corresponding to one subswath
Return:
(xarray.dataset) : range noise lut with corrected line numbers
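Example:
Minimal sketch of the jump detection used below, on synthetic azimuth times (all values hypothetical). In a TOPS SLC, consecutive bursts overlap in azimuth, so the per-line azimuth time steps backwards at each burst boundary:

>>> import numpy as np
>>> tt = np.array(['2024-01-01T00:00:00.000', '2024-01-01T00:00:00.200',
...                '2024-01-01T00:00:00.100', '2024-01-01T00:00:00.300'],
...               dtype='datetime64[ms]')
>>> np.ravel(np.argwhere(np.diff(tt) < np.timedelta64(0)) + 1)
array([2])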
"""
# Detection of azimuthTime jumps (linked to burst changes). Burst sensingTime should NOT be used since it can have erroneous values too!
line_shift = 0
tt = dt['measurement']['time']
i_jump = np.ravel(np.argwhere(np.diff(tt)<np.timedelta64(0))+1) # index of jumps
line_jump_meas = dt['measurement']['line'][i_jump] # line number of jumps
# line_jump_noise = np.ravel(dt['noise_range']['line'][1:-1].data) # annotated line number of burst beginning; this one is corrupted for some S1 TOPS products
line_jump_noise = np.ravel(dt['noise_range']['line'][1:1+len(line_jump_meas)].data) # annotated line numbers of burst beginnings
burst_first_lineshift = line_jump_meas-line_jump_noise
if len(np.unique(burst_first_lineshift))==1:
line_shift = int(np.unique(burst_first_lineshift)[0])
logging.debug('line_shift: %s',line_shift)
else:
raise ValueError('Inconsistency in line shifting : {}'.format(burst_first_lineshift))
res = dt['noise_range'].ds.assign_coords({'line':dt['noise_range']['line']+line_shift})
if line_shift==0:
res.attrs['corrected_range_noise_lut'] = 'no change'
else:
res.attrs['corrected_range_noise_lut'] = 'shift : %i lines'%line_shift
return res

def select_gains(self):
"""
Assign the correct swath gain to each pixel using its swath number
@@ -469,12 +501,12 @@ def add_high_resolution_variables(self, luts=False, patch_variable=True, skip_va
self._dataset = self._dataset.merge(
self._load_from_geoloc(geoloc_vars, lazy_loading=lazy_loading))


self.add_swath_number()
if self.apply_recalibration:
self.add_gains(config["auxiliary_names"][self.sar_meta.short_name.split(":")[-2][0:3]][self.aux_config_name]["AUX_CAL"],
config["auxiliary_names"][self.sar_meta.short_name.split(":")[-2][0:3]][self.aux_config_name]["AUX_PP1"])
if 'GRD' in str(self.datatree.attrs['product']):
self.add_swath_number()

if self.apply_recalibration:
self.add_gains(config["auxiliary_names"][self.sar_meta.short_name.split(":")[-2][0:3]][self.aux_config_name]["AUX_CAL"],
config["auxiliary_names"][self.sar_meta.short_name.split(":")[-2][0:3]][self.aux_config_name]["AUX_PP1"])

rasters = self._load_rasters_vars()
if rasters is not None:
@@ -526,14 +558,14 @@ def add_swath_number(self):
'line': self._dataset.coords["line"], 'sample': self._dataset.coords["sample"]})

# Assume dataset.swaths has 45 elements, as mentioned
for i in range(len(self.datatree['swath_merging'].swaths)):
for i in range(len(self.datatree['swath_merging'].ds['swaths'])):
y_min, y_max = self.datatree['swath_merging']['firstAzimuthLine'][
i], self.datatree['swath_merging']['lastAzimuthLine'][i]
x_min, x_max = self.datatree['swath_merging']['firstRangeSample'][
i], self.datatree['swath_merging']['lastRangeSample'][i]

# Locate the pixels belonging to this swath
swath_index = int(self.datatree['swath_merging'].swaths[i])
swath_index = int(self.datatree['swath_merging'].ds['swaths'][i])

condition = (self._dataset.line >= y_min) & (self._dataset.line <= y_max) & (
self._dataset.sample >= x_min) & (self._dataset.sample <= x_max)
Expand Down
