From ca99705c1f30cdff35ec2b6a9489e6bbba15d7f4 Mon Sep 17 00:00:00 2001 From: agrouaze Date: Mon, 6 Jan 2025 17:29:48 +0100 Subject: [PATCH 01/10] first atempt to replace xarray-datatree by xarray native DataTrees --- ci/requirements/environment.yaml | 1 - docs/conf.py | 1 - docs/index.rst | 3 +- docs/installing.rst | 4 +- environment.yml | 1 - get_polygons_from_l1b.py | 70 +------------------ pyproject.toml | 1 - slcl1butils/coloc/coloc.py | 9 ++- slcl1butils/compute/compute_from_l1b.py | 9 +-- slcl1butils/compute/stack_wv_l1c_monthly.py | 13 ++-- slcl1butils/plotting/quadmeshgeoviewsL1B.py | 3 - .../scripts/do_IW_L1C_SAFE_from_L1B_SAFE.py | 2 +- .../scripts/do_WV_L1C_SAFE_from_L1B_SAFE.py | 2 +- 13 files changed, 17 insertions(+), 102 deletions(-) diff --git a/ci/requirements/environment.yaml b/ci/requirements/environment.yaml index 8520279..e753605 100644 --- a/ci/requirements/environment.yaml +++ b/ci/requirements/environment.yaml @@ -23,7 +23,6 @@ dependencies: - fsspec # data - xarray - - xarray-datatree - dask - numpy - pandas diff --git a/docs/conf.py b/docs/conf.py index 978dff2..db2fb7a 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -60,7 +60,6 @@ "numpy": ("https://numpy.org/doc/stable", None), "xarray": ("https://docs.xarray.dev/en/latest/", None), "rasterio": ("https://rasterio.readthedocs.io/en/latest/", None), - "datatree": ("https://xarray-datatree.readthedocs.io/en/latest/", None), 'geoviews': ('https://geoviews.org/index.html', None) } diff --git a/docs/index.rst b/docs/index.rst index d7e2cc3..9e3482a 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -6,7 +6,7 @@ slcl1butils: functions to play with IFREMER L1B Sentinel-1 SLC SAR products Acquisition modes available in L1B IFREMER product family are IW and WV. -The products are *netCDF* files containing `datatree`_ object. +The products are *netCDF* files containing `xarray.datatree` object. @@ -104,5 +104,4 @@ Last documentation build: |today| basic_api .. 
_on github: /~https://github.com/umr-lops/utils_xsarslc_l1b -.. _datatree: /~https://github.com/xarray-contrib/datatree .. _xarray: http://xarray.pydata.org \ No newline at end of file diff --git a/docs/installing.rst b/docs/installing.rst index 27b8562..c42b24e 100644 --- a/docs/installing.rst +++ b/docs/installing.rst @@ -4,8 +4,8 @@ Installation ************ -L1B SAR SLC IFREMER products are netCDF (.nc) files containing groups. -To read netCDF files with groups, a possible python library is `xarray-datatree`. +Level-1B SAR SLC IFREMER products are netCDF (.nc) files containing groups. +To read netCDF files with groups, a possible python library is `xarray`. Installation in a conda_ environment is recommended. diff --git a/environment.yml b/environment.yml index 6ef0a23..220d81c 100644 --- a/environment.yml +++ b/environment.yml @@ -6,7 +6,6 @@ dependencies: - geoviews - fsspec - numpy - - xarray-datatree - xarray - rasterio - rioxarray diff --git a/get_polygons_from_l1b.py b/get_polygons_from_l1b.py index cc0462d..589271c 100644 --- a/get_polygons_from_l1b.py +++ b/get_polygons_from_l1b.py @@ -1,15 +1,10 @@ # + -import datatree import numpy as np -from glob import glob -import os -import matplotlib.pyplot as plt from shapely import geometry from shapely import wkt -import time + import xarray as xr -# from xsarslc.tools import xndindex # + def get_swath_tiles_polygons_from_l1bgroup(l1b_ds, swath_only=False, variable_names = [None],ik=0,burst_type=None): @@ -176,66 +171,3 @@ def get_swath_tiles_polygons_from_l1bfiles(l1b_files, variable_names = [None], i return polygons,coordinates,variables else: return polygons,coordinates - - -# def get_polygons_from_l1b(files, varnames = None): - -# pts_tiles_intra = []; pts_tiles_inter = []; pts_swath = [] -# t0 = time.time() - -# pts_tiles = {} -# burst_type = ['intraburst','interburst'] -# variables = {} -# for brst in burst_type: -# variables[brst] = {} -# pts_tiles[brst] = [] -# if (varnames is not None): -# for varname 
in varnames: -# variables[brst][varname] = [] - -# for cpt,_file in enumerate(files): - -# dt = datatree.open_datatree(_file) - -# polyswath = wkt.loads(dt['intraburst'].ds.attrs['footprint']) -# lon,lat = polyswath.exterior.xy -# _pts_swath = [(x,y) for x,y in zip(lon,lat)] -# pts_swath.append(_pts_swath) - - -# for brst in burst_type: - -# lon_corners = dt[brst]['corner_longitude'].squeeze() -# lat_corners = dt[brst]['corner_latitude'].squeeze() -# Nt = dt[brst].ds.sizes['tile_sample'] -# bursts = dt[brst]['burst'].values -# for ib in bursts: -# for it in np.arange(Nt): - -# # Get corner list -# _lon1 = lon_corners.sel(burst=ib).isel(tile_sample=it,c_line=0) -# _lon2 = lon_corners.sel(burst=ib).isel(tile_sample=it,c_line=1) -# _lat1 = lat_corners.sel(burst=ib).isel(tile_sample=it,c_line=0) -# _lat2 = lat_corners.sel(burst=ib).isel(tile_sample=it,c_line=1) -# lon = list(_lon1.values) + list(_lon2.values[::-1]) -# lat = list(_lat1.values) + list(_lat2.values[::-1]) -# _pts_tiles = [ (x,y) for x,y in zip(lon,lat)] -# pts_tiles[brst].append(_pts_tiles) - -# # Get variables -# if (varnames is not None): -# for varname in varnames: -# variables[brst][varname].append(dt[brst][varname].sel(burst=ib).isel(tile_sample=it).values[0]) - - - - -# t1 = time.time() -# print(t1-t0) -# if (varnames is not None): -# return pts_swath, pts_tiles, variables -# else: -# return pts_swath, pts_tiles -# - - - diff --git a/pyproject.toml b/pyproject.toml index 15d2c9e..0b27ea0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,7 +18,6 @@ dependencies = [ "numpy", "netCDF4", "shapely", - "xarray-datatree", 'tqdm', "zarr", 'fsspec', diff --git a/slcl1butils/coloc/coloc.py b/slcl1butils/coloc/coloc.py index d4bb441..5881da0 100644 --- a/slcl1butils/coloc/coloc.py +++ b/slcl1butils/coloc/coloc.py @@ -5,11 +5,10 @@ from slcl1butils.raster_readers import ww3_IWL1Btrack_hindcasts_30min import sys, os from slcl1butils.get_polygons_from_l1b import get_swath_tiles_polygons_from_l1bgroup -from 
datetime import datetime, timedelta +from datetime import datetime from glob import glob import numpy as np import xarray as xr -from datatree import DataTree import logging @@ -182,9 +181,9 @@ def do_coloc_L1B_with_raster_SAFE(full_safe_file, ancillary_list, skip=True)->in ds_inter = xr.merge([l1b_ds_inter, xr.merge(ds_inter)]) # Building the output datatree # Data Tree for outputs - dt = DataTree() - dt['intraburst'] = DataTree(data=ds_intra) - dt['interburst'] = DataTree(data=ds_inter) + dt = xr.DataTree() + dt['intraburst'] = xr.DataTree(data=ds_intra) + dt['interburst'] = xr.DataTree(data=ds_inter) # return dt, ds_intra, ds_inter # Saving the results in netCDF diff --git a/slcl1butils/compute/compute_from_l1b.py b/slcl1butils/compute/compute_from_l1b.py index 264dab1..0ad66cf 100644 --- a/slcl1butils/compute/compute_from_l1b.py +++ b/slcl1butils/compute/compute_from_l1b.py @@ -18,10 +18,6 @@ def compute_xs_from_l1b(_file, burst_type='intra', time_separation='2tau')->(xr. """ # Reading the l1b file # Loading the specified burst group - # dt = datatree.open_datatree(_file) - # Version 1.4 - # ds = xr.open_dataset(_file,group=burst_type+'burst_xspectra') - # Version 1.4a if 'wv' in _file: ds = xr.open_dataset(_file, group=burst_type) else: @@ -99,10 +95,7 @@ def compute_xs_from_l1b(_file, burst_type='intra', time_separation='2tau')->(xr. 
def compute_xs_from_l1b_wv(_file, time_separation='2tau'): # Reading the l1b file # Loading the specified burst group - # dt = datatree.open_datatree(_file) - # Version 1.4 - # ds = xr.open_dataset(_file,group=burst_type+'burst_xspectra') - # Version 1.4a + ds = xr.open_dataset(_file, group='') # ds = dt[burst_type+'burst_xspectra'].to_dataset() diff --git a/slcl1butils/compute/stack_wv_l1c_monthly.py b/slcl1butils/compute/stack_wv_l1c_monthly.py index 2f03a3e..a4701fd 100644 --- a/slcl1butils/compute/stack_wv_l1c_monthly.py +++ b/slcl1butils/compute/stack_wv_l1c_monthly.py @@ -14,7 +14,6 @@ import datetime import time import argparse -import datatree from slcl1butils.utils import get_memory_usage from slcl1butils.get_config import get_conf from dateutil import relativedelta @@ -663,7 +662,7 @@ def stack_wv_l1c_per_month(list_SAFEs, dev=False, keep_xspectrum=False): vL1C = "unknown" vL1B = "unknown" # ds = None - dtnew = datatree.DataTree(name="root", data=None) + dtnew = xr.DataTree(name="root", data=None) if len(list_SAFEs) > 0: # for safe_xsp in list_SAFEs: @@ -676,12 +675,12 @@ def stack_wv_l1c_per_month(list_SAFEs, dev=False, keep_xspectrum=False): logging.warning("empty safe: %s", safe_xsp) else: if ssi==0: - dt = datatree.open_datatree(lst_nc_l1c[0]) + dt = xr.open_datatree(lst_nc_l1c[0]) vL1C = dt.attrs["L1C_product_version"] vL1B = dt.attrs["L1B_product_version"] if keep_xspectrum is True and k_rg_ref is None: # code below executed only once thanks to this line # dsfirst = xr.open_dataset(lst_nc_l1c[0], group='intraburst', engine='netcdf4', cache=False) - dt = datatree.open_datatree(lst_nc_l1c[0]) + dt = xr.open_datatree(lst_nc_l1c[0]) dsfirst = dt["intraburst"].to_dataset() # indkaz_bo, indkaz_up, indkrg_up, indkrg_bo = get_index_wavenumbers( # dsfirst @@ -765,9 +764,9 @@ def stack_wv_l1c_per_month(list_SAFEs, dev=False, keep_xspectrum=False): ds_sar_l1b[vv].attrs['source'] = 'SAR Ifremer level-1B processor for WV SLC' 
ds_sar_l1b['longitude'].attrs['description'] = 'longitude of the center of level-1B tile from Sentinel-1 WV imagette' ds_sar_l1b['latitude'].attrs['description'] = 'latitude of the center of level-1B tile from Sentinel-1 WV imagette' - node_ecm = datatree.DataTree(name="ecmwf_0100_1h", parent=dtnew, data=ds_ecmwf) - node_ww3 = datatree.DataTree(name="ww3_global_yearly_3h", parent=dtnew, data=ds_ww3) - node_l1b = datatree.DataTree(name=GROUP_NAME_SAR, parent=dtnew, data=ds_sar_l1b) + node_ecm = xr.DataTree(name="ecmwf_0100_1h", parent=dtnew, data=ds_ecmwf) + node_ww3 = xr.DataTree(name="ww3_global_yearly_3h", parent=dtnew, data=ds_ww3) + node_l1b = xr.DataTree(name=GROUP_NAME_SAR, parent=dtnew, data=ds_sar_l1b) # dt['ecmwf_0100_1h'] = ds_ecmwf # dt['ww3_global_yearly_3h'] = ds_ww3 # dt['sentinel1_wv_l1b'] = ds_sar_l1b diff --git a/slcl1butils/plotting/quadmeshgeoviewsL1B.py b/slcl1butils/plotting/quadmeshgeoviewsL1B.py index 419b025..32df6e9 100644 --- a/slcl1butils/plotting/quadmeshgeoviewsL1B.py +++ b/slcl1butils/plotting/quadmeshgeoviewsL1B.py @@ -6,7 +6,6 @@ import geoviews as gv import logging import os -import datatree gv.extension("bokeh") @@ -98,8 +97,6 @@ def prepare_L1BC(ff, bursttype, variable, **kwargs): """ ds = open_of_use(ff, bursttype) - # ds.load() - # ds = datatree.open_datatree(ff)[bursttype].to_dataset() # it doesnt change anything to segmentation fault... 
if "burst" in ds[variable].coords: stackable_coords = ["burst", "tile_line"] else: diff --git a/slcl1butils/scripts/do_IW_L1C_SAFE_from_L1B_SAFE.py b/slcl1butils/scripts/do_IW_L1C_SAFE_from_L1B_SAFE.py index 722d1c7..d5a7077 100644 --- a/slcl1butils/scripts/do_IW_L1C_SAFE_from_L1B_SAFE.py +++ b/slcl1butils/scripts/do_IW_L1C_SAFE_from_L1B_SAFE.py @@ -13,7 +13,7 @@ from glob import glob import numpy as np import xarray as xr -from datatree import DataTree +from xarray import DataTree import time import logging import sys, os diff --git a/slcl1butils/scripts/do_WV_L1C_SAFE_from_L1B_SAFE.py b/slcl1butils/scripts/do_WV_L1C_SAFE_from_L1B_SAFE.py index c4bd30b..6105906 100644 --- a/slcl1butils/scripts/do_WV_L1C_SAFE_from_L1B_SAFE.py +++ b/slcl1butils/scripts/do_WV_L1C_SAFE_from_L1B_SAFE.py @@ -9,7 +9,7 @@ from glob import glob import numpy as np import xarray as xr -from datatree import DataTree +from xarray import DataTree import time import logging import sys, os From 612ff3b5d412d23fcd904e10e752d6c25b227e32 Mon Sep 17 00:00:00 2001 From: grouazel Date: Tue, 7 Jan 2025 11:55:51 +0100 Subject: [PATCH 02/10] fix typo with new datatree and add L1B path in attrs --- docs/examples/display_a_IW_L1B_xspectra.ipynb | 6 +- .../do_L1C_SAFE_from_L1B_SAFE_example.ipynb | 2 +- ...g_L1B_geometry_with_holoview_example.ipynb | 2 +- slcl1butils/coloc/coloc.py | 114 +----------------- ...g_L1B_geometry_with_holoview_example.ipynb | 81 ------------- ..._L1B_variables_with_holoview_example.ipynb | 82 ------------- .../scripts/do_IW_L1C_SAFE_from_L1B_SAFE.py | 7 +- 7 files changed, 13 insertions(+), 281 deletions(-) delete mode 100644 slcl1butils/plotting/plotting_L1B_geometry_with_holoview_example.ipynb delete mode 100644 slcl1butils/plotting/plotting_L1B_variables_with_holoview_example.ipynb diff --git a/docs/examples/display_a_IW_L1B_xspectra.ipynb b/docs/examples/display_a_IW_L1B_xspectra.ipynb index 98dde95..d02201c 100644 --- a/docs/examples/display_a_IW_L1B_xspectra.ipynb +++ 
b/docs/examples/display_a_IW_L1B_xspectra.ipynb @@ -27,7 +27,7 @@ "source": [ "import glob\n", "import os\n", - "import datatree\n", + "import xarray as xr\n", "from slcl1butils.utils import get_test_file\n", "#l1bncfile_pattern = os.path.abspath('../../assests/*iw*nc')\n", "one_safe_l1b = get_test_file('S1B_IW_XSP__1SDV_20210328T055258_20210328T055325_026211_0320D4_DC31_A13.SAFE')\n", @@ -47,7 +47,7 @@ }, "outputs": [], "source": [ - "dt = datatree.open_datatree(l1bncfile)\n", + "dt = xr.open_datatree(l1bncfile)\n", "dt" ] }, @@ -207,7 +207,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.0" + "version": "3.9.15" } }, "nbformat": 4, diff --git a/docs/examples/do_L1C_SAFE_from_L1B_SAFE_example.ipynb b/docs/examples/do_L1C_SAFE_from_L1B_SAFE_example.ipynb index c217cc9..db1f58d 100644 --- a/docs/examples/do_L1C_SAFE_from_L1B_SAFE_example.ipynb +++ b/docs/examples/do_L1C_SAFE_from_L1B_SAFE_example.ipynb @@ -108,7 +108,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.0" + "version": "3.9.15" } }, "nbformat": 4, diff --git a/docs/examples/plotting_L1B_geometry_with_holoview_example.ipynb b/docs/examples/plotting_L1B_geometry_with_holoview_example.ipynb index 09284c2..255369b 100644 --- a/docs/examples/plotting_L1B_geometry_with_holoview_example.ipynb +++ b/docs/examples/plotting_L1B_geometry_with_holoview_example.ipynb @@ -164,7 +164,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.0" + "version": "3.9.15" } }, "nbformat": 4, diff --git a/slcl1butils/coloc/coloc.py b/slcl1butils/coloc/coloc.py index 5881da0..c3ead82 100644 --- a/slcl1butils/coloc/coloc.py +++ b/slcl1butils/coloc/coloc.py @@ -82,117 +82,9 @@ def coloc_tiles_from_l1bgroup_with_raster(l1b_ds, raster_bb_ds, apply_merging=Tr projected_field = upscaled_da.interp(x=lonsar,y=latsar,assume_sorted=False).drop_vars(['x', 'y']) 
mapped_ds_list.append(projected_field) raster_mapped = xr.merge(mapped_ds_list) - merged_raster_mapped = xr.merge([l1b_ds, raster_mapped]) + if apply_merging: + merged_raster_mapped = xr.merge([l1b_ds, raster_mapped]) return merged_raster_mapped else: - return raster_mapped - - -def do_coloc_L1B_with_raster_SAFE(full_safe_file, ancillary_list, skip=True)->int: - """ - - :param full_safe_file (str): path of the product to co-localise - :param ancillary_list (list): information about the reference products to add - :param skip (bool): True -> do nothing to original product [optional] - - :Returns: - - """ - # TODO: see whether we keep or not this method - safe_file = os.path.basename(full_safe_file) - safe_path = os.path.dirname(full_safe_file) + '/' - sth = 1 - - files = glob(os.path.join(safe_path, safe_file, '*_L1B_*nc')) - for _file in files: - - # ==================== - # FILE OUT - # ==================== - - if sth > 0: - # - # - # Output file directory - pathout_root = safe_path.replace('l1b', 'l1e') - # print(~os.path.isdir(pathout_root)) - if (os.path.isdir(pathout_root) == False): - os.system('mkdir ' + pathout_root) - pathout = pathout_root + safe_file + '/' - # print(os.path.isdir(pathout)) - if (os.path.isdir(pathout) == False): - os.system('mkdir ' + pathout) - # - # Ouput filename - fileout = os.path.basename(_file).replace('L1B', 'L1D') - if skip and os.path.isfile(pathout + fileout): - print('File already created and available on disk') - print('Here: ' + pathout + fileout) - continue - logging.info('File out: %s', pathout + fileout) - - ds_inter = [] - ds_intra = [] - for ancillary in ancillary_list: - - print(ancillary['name']) - - # For each L1B - burst_type = 'intra' - l1b_ds = xr.open_dataset(_file, group=burst_type + 'burst') - ## Check if the ancillary data can be found - sar_date = datetime.strptime(str.split(l1b_ds.attrs['start_date'], '.')[0], '%Y-%m-%d %H:%M:%S') - closest_date, filename = resource_strftime(ancillary['resource'], 
step=ancillary['step'], date=sar_date) - if (len(glob(filename)) != 1): - continue - # Getting the raster from anxillary data - if (ancillary['name'] == 'ecmwf_0100_1h'): - raster_ds = ecmwf_0100_1h(filename) - elif (ancillary['name'] == 'ww3_global_yearly_3h'): - raster_ds = ww3_global_yearly_3h(filename, closest_date) - elif (ancillary['name']) == 'ww3hindcast_field': - raster_ds == ww3_IWL1Btrack_hindcasts_30min(filename,closest_date) - else: - raise ValueError('%s not a correct dataset name'%ancillary['name']) - - # Get the polygons of the swath data - polygons = get_swath_tiles_polygons_from_l1bgroup(l1b_ds, swath_only=True) - # Crop the raster to the swath bounding box limit - raster_bb_ds = raster_cropping_in_polygon_bounding_box(polygons['swath'][0], raster_ds) - - # Loop on the grid in the product - burst_types = ['intra', 'inter'] - for burst_type in burst_types: - # Define the dataset to work on - # get the mapped raster onto swath grid for each tile - if (burst_type == 'intra'): - l1b_ds_intra = xr.open_dataset(_file, group=burst_type + 'burst') - _ds = coloc_tiles_from_l1bgroup_with_raster(l1b_ds_intra, raster_bb_ds, apply_merging=False) - ds_intra.append(_ds) - else: - l1b_ds_inter = xr.open_dataset(_file, group=burst_type + 'burst') - _ds = coloc_tiles_from_l1bgroup_with_raster(l1b_ds_inter, raster_bb_ds, apply_merging=False) - ds_inter.append(_ds) - - # return ds_inter, l1b_ds_inter - # Merging the datasets - ds_intra = xr.merge([l1b_ds_intra, xr.merge(ds_intra)]) - ds_inter = xr.merge([l1b_ds_inter, xr.merge(ds_inter)]) - # Building the output datatree - # Data Tree for outputs - dt = xr.DataTree() - dt['intraburst'] = xr.DataTree(data=ds_intra) - dt['interburst'] = xr.DataTree(data=ds_inter) - # return dt, ds_intra, ds_inter - - # Saving the results in netCDF - # ==================== - # FILE OUT - # ==================== - sth = 1 - if (sth > 0): - # Saving the results in netCDF - dt.to_netcdf(pathout + fileout) - - return 0 + return raster_mapped 
\ No newline at end of file diff --git a/slcl1butils/plotting/plotting_L1B_geometry_with_holoview_example.ipynb b/slcl1butils/plotting/plotting_L1B_geometry_with_holoview_example.ipynb deleted file mode 100644 index 0b532c8..0000000 --- a/slcl1butils/plotting/plotting_L1B_geometry_with_holoview_example.ipynb +++ /dev/null @@ -1,81 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "834371f1-0262-4bd9-a95e-8125c6a2793e", - "metadata": {}, - "outputs": [], - "source": [ - "import holoviews as hv\n", - "hv.extension('bokeh')\n", - "import geoviews as gv\n", - "\n", - "import datatree\n", - "import numpy as np\n", - "from glob import glob\n", - "import os\n", - "import matplotlib.pyplot as plt\n", - "from shapely import geometry\n", - "from shapely import wkt\n", - "\n", - "from get_polygones_from_l1b import get_polys_alone" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "52f92190-5ae2-4be6-9ba1-0c692af801b2", - "metadata": {}, - "outputs": [], - "source": [ - "#path = '/home/amouche/data/sentinel1/XSAR_L1B/L1B_SLC/v1.4a/S1A_IW_SLC*20210306*.SAFE/'\n", - "#path = '/home/amouche/data/sentinel1/XSAR_L1B/L1B_SLC/v1.4a/S1A_IW_SLC*20210306T072758_20210306T072828*.SAFE/'\n", - "path = '/home/datawork-cersat-public/project/sarwave/data/products/tests/iw/slc/l1b/1.4a/S1A_IW_SLC*20210306*.SAFE/'\n", - "#path = '/home/amouche/data/sentinel1/XSAR_L1B/L1B_SLC/v1.4a/S1A_IW_SLC__1SDV_20210306T072758_20210306T072828_036874_04562A_A67C.SAFE/'\n", - "files = glob(path + '*vv*.nc')\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3585453f-a15c-490e-ad08-129c10a44935", - "metadata": {}, - "outputs": [], - "source": [ - "polys = get_polys_alone(files)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9ae1c2aa-3dbf-42f2-9b4c-d5a89839b7da", - "metadata": {}, - "outputs": [], - "source": [ - "polys.opts(width=1000,height=1500,tools=['hover'])" - ] - } - ], - "metadata": { - 
"kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.15" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/slcl1butils/plotting/plotting_L1B_variables_with_holoview_example.ipynb b/slcl1butils/plotting/plotting_L1B_variables_with_holoview_example.ipynb deleted file mode 100644 index 88cbbac..0000000 --- a/slcl1butils/plotting/plotting_L1B_variables_with_holoview_example.ipynb +++ /dev/null @@ -1,82 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "137f73fc-b4ef-45a8-bf7f-59e3dd1cd10c", - "metadata": {}, - "outputs": [], - "source": [ - "import holoviews as hv\n", - "hv.extension('bokeh')\n", - "import geoviews as gv\n", - "\n", - "import datatree\n", - "import numpy as np\n", - "from glob import glob\n", - "import os\n", - "import matplotlib.pyplot as plt\n", - "from shapely import geometry\n", - "from shapely import wkt\n", - "\n", - "from get_polygones_from_l1b import get_polys_with_varname" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "60760ff6-bda3-4291-a039-86d4ebc672a8", - "metadata": {}, - "outputs": [], - "source": [ - "#path = '/home/amouche/data/sentinel1/XSAR_L1B/L1B_SLC/v1.4a/S1A_IW_SLC*20210306*.SAFE/'\n", - "#path = '/home/amouche/data/sentinel1/XSAR_L1C/L1C_SLC/v1.4a/S1A_IW_SLC*20210306*.SAFE/'\n", - "path = '/home/datawork-cersat-public/project/sarwave/data/products/tests/iw/slc/l1b/1.4a/S1A_IW_SLC*20210306*.SAFE/'\n", - "files = glob(path + '*vv*.nc')\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "83bf2f02-9079-41a9-8838-c2a789508d02", - "metadata": {}, - "outputs": [], - "source": [ - "polys = 
get_polys_with_varname(files,burst_type=['intraburst'],varname='doppler_centroid',cmap='seismic',alpha=0.75,clim=(-0.1,0.1))\n", - "#polys = get_polys_with_varname(files,burst_type=['interburst'],varname='macs_Im',cmap='seismic',alpha=0.75,clim=(-100,100))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "ae34341a-ea20-4843-9c5e-25eff147b86c", - "metadata": {}, - "outputs": [], - "source": [ - "\n", - "polys.opts(width=1000,height=1500,tools=['hover'])" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.15" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/slcl1butils/scripts/do_IW_L1C_SAFE_from_L1B_SAFE.py b/slcl1butils/scripts/do_IW_L1C_SAFE_from_L1B_SAFE.py index d5a7077..c5249f7 100644 --- a/slcl1butils/scripts/do_IW_L1C_SAFE_from_L1B_SAFE.py +++ b/slcl1butils/scripts/do_IW_L1C_SAFE_from_L1B_SAFE.py @@ -119,6 +119,9 @@ def do_L1C_SAFE_from_L1B_SAFE( colocat=colocat, time_separation=time_separation, ) + # add source L1B product + ds_intra.attrs['L1B_XSP_PATH'] = os.path.dirname(l1b_fullpath) + ds_inter.attrs['L1B_XSP_PATH'] = os.path.dirname(l1b_fullpath) ds_intra, ds_inter = add_missing_variables(ds_intra, ds_inter) for anc in flag_ancillaries_added: if flag_ancillaries_added[anc]: @@ -326,9 +329,9 @@ def save_l1c_to_netcdf(l1c_full_path, ds_intra, ds_inter, version): # Building the output datatree dt = DataTree() burst_type = "intra" - dt[burst_type + "burst"] = DataTree(data=ds_intra) + dt[burst_type + "burst"] = DataTree(ds_intra) burst_type = "inter" - dt[burst_type + "burst"] = DataTree(data=ds_inter) + dt[burst_type + "burst"] = DataTree(ds_inter) dt.attrs["version_slcl1butils"] = 
slcl1butils.__version__ dt.attrs["product_version"] = version From becf87038513c4eaf0d2bc953d20a830f9e16caf Mon Sep 17 00:00:00 2001 From: grouazel Date: Tue, 7 Jan 2025 11:58:27 +0100 Subject: [PATCH 03/10] add a high level check --- high-level-tests/do_L1C_SAFE_from_L1B_SAFE.py | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 high-level-tests/do_L1C_SAFE_from_L1B_SAFE.py diff --git a/high-level-tests/do_L1C_SAFE_from_L1B_SAFE.py b/high-level-tests/do_L1C_SAFE_from_L1B_SAFE.py new file mode 100644 index 0000000..86105f5 --- /dev/null +++ b/high-level-tests/do_L1C_SAFE_from_L1B_SAFE.py @@ -0,0 +1,30 @@ +import os, sys +import pdb +from glob import glob +from slcl1butils.scripts.do_IW_L1C_SAFE_from_L1B_SAFE import do_L1C_SAFE_from_L1B_SAFE +import slcl1butils.utils +from importlib import reload +from slcl1butils.utils import get_test_file +import logging +reload(logging) +logging.basicConfig(level=logging.INFO) +from slcl1butils.get_config import get_conf +conf = get_conf() +conf +one_safe_l1b = get_test_file('S1B_IW_XSP__1SDV_20210328T055258_20210328T055325_026211_0320D4_DC31_A13.SAFE') +one_safe_l1b +ancillary_datasets = conf['auxilliary_dataset'] +ancillary_datasets.pop('ww3hindcast_spectra',None) +ancillary_datasets.pop('ww3_global_yearly_3h',None) +full_safe_files = [one_safe_l1b] +version = '0.1' +for ffi,full_safe_file in enumerate(full_safe_files): + + print('%i/%i'%(ffi,len(full_safe_files))) + print('===') + print(os.path.basename(full_safe_file)) + print('===') + ret = do_L1C_SAFE_from_L1B_SAFE(full_safe_file,version=version,outputdir=conf['iw_outputdir'], + ancillary_list=ancillary_datasets,dev=True,overwrite=True) + logging.info('new file: %s',ret) +logging.info('high level check: OK (successful)') \ No newline at end of file From ad1521da5984d13baefdcb0f3dbf70f2bccdd875 Mon Sep 17 00:00:00 2001 From: grouazel Date: Tue, 7 Jan 2025 12:17:55 +0100 Subject: [PATCH 04/10] linting black --- 
high-level-tests/do_L1C_SAFE_from_L1B_SAFE.py | 50 +- slcl1butils/__init__.py | 7 +- slcl1butils/coloc/coloc.py | 36 +- slcl1butils/coloc/coloc_IW_WW3spectra.py | 40 +- slcl1butils/coloc/coloc_WV_WW3spectra.py | 11 +- slcl1butils/compute/compute_from_l1b.py | 108 +-- slcl1butils/compute/homogeneous_output.py | 41 +- slcl1butils/compute/stack_iw_l1b.py | 5 +- slcl1butils/compute/stack_wv_l1c_monthly.py | 379 ++++++----- slcl1butils/conversion_polar_cartesian.py | 94 +-- slcl1butils/get_config.py | 21 +- slcl1butils/get_polygons_from_l1b.py | 215 +++--- ..._azimuth_cutoff_lines_on_polar_spec_fig.py | 40 +- slcl1butils/plotting/display_one_spectra.py | 628 ++++++++++++------ slcl1butils/plotting/display_xspectra_grid.py | 413 +++++++----- slcl1butils/plotting/quadmeshgeoviewsL1B.py | 18 +- .../spectra_plot_circles_wavenumbers.py | 20 +- slcl1butils/raster_readers.py | 15 +- .../scripts/do_IW_L1C_SAFE_from_L1B_SAFE.py | 81 +-- .../scripts/do_WV_L1C_SAFE_from_L1B_SAFE.py | 69 +- .../do_WV_L1C_SAFE_from_L1B_SAFE_prun.py | 63 +- slcl1butils/spectrum_clockwise_to_trigo.py | 25 +- slcl1butils/spectrum_rotation.py | 20 +- slcl1butils/symmetrize_l1b_spectra.py | 28 +- slcl1butils/utils.py | 63 +- 25 files changed, 1506 insertions(+), 984 deletions(-) diff --git a/high-level-tests/do_L1C_SAFE_from_L1B_SAFE.py b/high-level-tests/do_L1C_SAFE_from_L1B_SAFE.py index 86105f5..f1d1aef 100644 --- a/high-level-tests/do_L1C_SAFE_from_L1B_SAFE.py +++ b/high-level-tests/do_L1C_SAFE_from_L1B_SAFE.py @@ -1,30 +1,34 @@ -import os, sys -import pdb -from glob import glob -from slcl1butils.scripts.do_IW_L1C_SAFE_from_L1B_SAFE import do_L1C_SAFE_from_L1B_SAFE -import slcl1butils.utils +import logging +import os from importlib import reload + +from slcl1butils.get_config import get_conf +from slcl1butils.scripts.do_IW_L1C_SAFE_from_L1B_SAFE import do_L1C_SAFE_from_L1B_SAFE from slcl1butils.utils import get_test_file -import logging + reload(logging) logging.basicConfig(level=logging.INFO) -from 
slcl1butils.get_config import get_conf conf = get_conf() -conf -one_safe_l1b = get_test_file('S1B_IW_XSP__1SDV_20210328T055258_20210328T055325_026211_0320D4_DC31_A13.SAFE') -one_safe_l1b -ancillary_datasets = conf['auxilliary_dataset'] -ancillary_datasets.pop('ww3hindcast_spectra',None) -ancillary_datasets.pop('ww3_global_yearly_3h',None) +one_safe_l1b = get_test_file( + "S1B_IW_XSP__1SDV_20210328T055258_20210328T055325_026211_0320D4_DC31_A13.SAFE" +) +ancillary_datasets = conf["auxilliary_dataset"] +ancillary_datasets.pop("ww3hindcast_spectra", None) +ancillary_datasets.pop("ww3_global_yearly_3h", None) full_safe_files = [one_safe_l1b] -version = '0.1' -for ffi,full_safe_file in enumerate(full_safe_files): - - print('%i/%i'%(ffi,len(full_safe_files))) - print('===') +version = "0.1" +for ffi, full_safe_file in enumerate(full_safe_files): + print("%i/%i" % (ffi, len(full_safe_files))) + print("===") print(os.path.basename(full_safe_file)) - print('===') - ret = do_L1C_SAFE_from_L1B_SAFE(full_safe_file,version=version,outputdir=conf['iw_outputdir'], - ancillary_list=ancillary_datasets,dev=True,overwrite=True) - logging.info('new file: %s',ret) -logging.info('high level check: OK (successful)') \ No newline at end of file + print("===") + ret = do_L1C_SAFE_from_L1B_SAFE( + full_safe_file, + version=version, + outputdir=conf["iw_outputdir"], + ancillary_list=ancillary_datasets, + dev=True, + overwrite=True, + ) + logging.info("new file: %s", ret) +logging.info("high level check: OK (successful)") diff --git a/slcl1butils/__init__.py b/slcl1butils/__init__.py index 7906495..50da6af 100644 --- a/slcl1butils/__init__.py +++ b/slcl1butils/__init__.py @@ -1,7 +1,6 @@ - -__all__ = ['utils','compute','plotting','scripts','coloc','legacy_ocean'] +__all__ = ["utils", "compute", "plotting", "scripts", "coloc", "legacy_ocean"] try: from importlib import metadata -except ImportError: # for Python<3.8 +except ImportError: # for Python<3.8 import importlib_metadata as metadata 
-__version__ = metadata.version('slcl1butils') +__version__ = metadata.version("slcl1butils") diff --git a/slcl1butils/coloc/coloc.py b/slcl1butils/coloc/coloc.py index c3ead82..2478116 100644 --- a/slcl1butils/coloc/coloc.py +++ b/slcl1butils/coloc/coloc.py @@ -1,15 +1,5 @@ -import pdb -from slcl1butils.raster_readers import ecmwf_0100_1h -from slcl1butils.raster_readers import ww3_global_yearly_3h -from slcl1butils.raster_readers import resource_strftime -from slcl1butils.raster_readers import ww3_IWL1Btrack_hindcasts_30min -import sys, os -from slcl1butils.get_polygons_from_l1b import get_swath_tiles_polygons_from_l1bgroup -from datetime import datetime -from glob import glob import numpy as np import xarray as xr -import logging def raster_cropping_in_polygon_bounding_box(poly_tile, raster_ds, enlarge=True, step=1): @@ -32,25 +22,27 @@ def raster_cropping_in_polygon_bounding_box(poly_tile, raster_ds, enlarge=True, lat_range = [lat1, lat2] # ensure dims ordering - raster_ds = raster_ds.transpose('y', 'x') + raster_ds = raster_ds.transpose("y", "x") # ensure coords are increasing ( for RectBiVariateSpline ) - for coord in ['x', 'y']: + for coord in ["x", "y"]: if raster_ds[coord].values[-1] < raster_ds[coord].values[0]: raster_ds = raster_ds.reindex({coord: raster_ds[coord][::-1]}) # from lon/lat box in xsar dataset, get the corresponding box in raster_ds (by index) ilon_range = [ np.searchsorted(raster_ds.x.values, lon_range[0]), - np.searchsorted(raster_ds.x.values, lon_range[1]) + np.searchsorted(raster_ds.x.values, lon_range[1]), ] ilat_range = [ np.searchsorted(raster_ds.y.values, lat_range[0]), - np.searchsorted(raster_ds.y.values, lat_range[1]) + np.searchsorted(raster_ds.y.values, lat_range[1]), ] # enlarge the raster selection range, for correct interpolation - if (enlarge): - ilon_range, ilat_range = [[rg[0] - step, rg[1] + step] for rg in (ilon_range, ilat_range)] + if enlarge: + ilon_range, ilat_range = [ + [rg[0] - step, rg[1] + step] for rg in 
(ilon_range, ilat_range) + ] # select the xsar box in the raster raster_ds = raster_ds.isel(x=slice(*ilon_range), y=slice(*ilat_range)) @@ -74,12 +66,16 @@ def coloc_tiles_from_l1bgroup_with_raster(l1b_ds, raster_bb_ds, apply_merging=Tr lonsar = l1b_ds.longitude mapped_ds_list = [] for var in raster_bb_ds: - if var not in ['forecast_hour']: + if var not in ["forecast_hour"]: raster_da = raster_bb_ds[var] upscaled_da = raster_da upscaled_da.name = var - upscaled_da = upscaled_da.astype(float) #added by agrouaze to fix TypeError: No matching signature found at interpolation - projected_field = upscaled_da.interp(x=lonsar,y=latsar,assume_sorted=False).drop_vars(['x', 'y']) + upscaled_da = upscaled_da.astype( + float + ) # added by agrouaze to fix TypeError: No matching signature found at interpolation + projected_field = upscaled_da.interp( + x=lonsar, y=latsar, assume_sorted=False + ).drop_vars(["x", "y"]) mapped_ds_list.append(projected_field) raster_mapped = xr.merge(mapped_ds_list) @@ -87,4 +83,4 @@ def coloc_tiles_from_l1bgroup_with_raster(l1b_ds, raster_bb_ds, apply_merging=Tr merged_raster_mapped = xr.merge([l1b_ds, raster_mapped]) return merged_raster_mapped else: - return raster_mapped \ No newline at end of file + return raster_mapped diff --git a/slcl1butils/coloc/coloc_IW_WW3spectra.py b/slcl1butils/coloc/coloc_IW_WW3spectra.py index c905b92..75d1e26 100644 --- a/slcl1butils/coloc/coloc_IW_WW3spectra.py +++ b/slcl1butils/coloc/coloc_IW_WW3spectra.py @@ -1,18 +1,19 @@ -import os -import pdb -import glob -import xarray as xr import datetime +import glob import logging +import os + import numpy as np -from slcl1butils.raster_readers import resource_strftime -from slcl1butils.utils import xndindex +import xarray as xr + +from slcl1butils.legacy_ocean.ocean.xPolarSpectrum import find_closest_ww3 # from ocean.xspectrum import from_ww3 # from ocean.xPolarSpectrum import find_closest_ww3 from slcl1butils.legacy_ocean.ocean.xspectrum import from_ww3 -from 
slcl1butils.legacy_ocean.ocean.xPolarSpectrum import find_closest_ww3 +from slcl1butils.raster_readers import resource_strftime from slcl1butils.symmetrize_l1b_spectra import symmetrize_xspectrum +from slcl1butils.utils import xndindex def resampleWW3spectra_on_TOPS_SAR_cartesian_grid(dsar, xspeckind): @@ -33,10 +34,12 @@ def resampleWW3spectra_on_TOPS_SAR_cartesian_grid(dsar, xspeckind): gridsar = { d: k for d, k in dsar.sizes.items() - #if d in ["burst", "tile_sample", "tile_line"] - if d in [ "tile_sample", "tile_line"] + # if d in ["burst", "tile_sample", "tile_line"] + if d in ["tile_sample", "tile_line"] } - if (xspeckind == "intra" and "xspectra_2tau_Re" in dsar) or (xspeckind == "inter" and "xspectra_Re" in dsar): # in a future version of L1B xspectra variable could be always present (even on land) but filled by NaN + if (xspeckind == "intra" and "xspectra_2tau_Re" in dsar) or ( + xspeckind == "inter" and "xspectra_Re" in dsar + ): # in a future version of L1B xspectra variable could be always present (even on land) but filled by NaN # symmetrize and combine Re+1j*Im for all the xspectra SAR if xspeckind == "intra": xsSAR = dsar["xspectra_2tau_Re"] + 1j * dsar["xspectra_2tau_Im"] @@ -106,7 +109,9 @@ def resampleWW3spectra_on_TOPS_SAR_cartesian_grid(dsar, xspeckind): ) # add the raw EFTH(f,dir) spectra from WW3 rawspww3 = ( - dsww3raw["efth"].isel(time=indiceww3spectra).rename("ww3EFTHraw") + dsww3raw["efth"] + .isel(time=indiceww3spectra) + .rename("ww3EFTHraw") ) rawspww3.attrs["description"] = "raw EFTH(f,dir) spectra" ds_ww3_cartesian = ds_ww3_cartesian.swap_dims( @@ -114,10 +119,12 @@ def resampleWW3spectra_on_TOPS_SAR_cartesian_grid(dsar, xspeckind): ).T rawspww3 = rawspww3.assign_coords(i) rawspww3 = rawspww3.expand_dims(["tile_line", "tile_sample"]) - #rawspww3 = rawspww3.expand_dims(["burst", "tile_line", "tile_sample"]) + # rawspww3 = rawspww3.expand_dims(["burst", "tile_line", "tile_sample"]) ds_ww3_cartesian = ds_ww3_cartesian.assign_coords(i) - 
ds_ww3_cartesian = ds_ww3_cartesian.expand_dims(["tile_line", "tile_sample"]) - #ds_ww3_cartesian = ds_ww3_cartesian.expand_dims(["burst", "tile_line", "tile_sample"]) + ds_ww3_cartesian = ds_ww3_cartesian.expand_dims( + ["tile_line", "tile_sample"] + ) + # ds_ww3_cartesian = ds_ww3_cartesian.expand_dims(["burst", "tile_line", "tile_sample"]) list_ww3_cart_sp.append(ds_ww3_cartesian) list_ww3_efth_sp.append(rawspww3) ds_ww3_cartesian_merged = xr.merge(list_ww3_cart_sp) @@ -136,7 +143,10 @@ def resampleWW3spectra_on_TOPS_SAR_cartesian_grid(dsar, xspeckind): else: raise ValueError("%s" % xspeckind) else: - logging.info('there is no xspectra in subswath %s',dsar.attrs['short_name'].split(':')[2]) + logging.info( + "there is no xspectra in subswath %s", + dsar.attrs["short_name"].split(":")[2], + ) return dsar, flag_ww3spectra_added, flag_ww3spectra_found diff --git a/slcl1butils/coloc/coloc_WV_WW3spectra.py b/slcl1butils/coloc/coloc_WV_WW3spectra.py index 62ea01d..fdfedcb 100644 --- a/slcl1butils/coloc/coloc_WV_WW3spectra.py +++ b/slcl1butils/coloc/coloc_WV_WW3spectra.py @@ -1,14 +1,15 @@ -import os -import pdb - -import xarray as xr import datetime import logging +import os + import numpy as np +import xarray as xr + +from slcl1butils.legacy_ocean.ocean.xPolarSpectrum import find_closest_ww3 + # from ocean.xspectrum import from_ww3 # from ocean.xPolarSpectrum import find_closest_ww3 from slcl1butils.legacy_ocean.ocean.xspectrum import from_ww3 -from slcl1butils.legacy_ocean.ocean.xPolarSpectrum import find_closest_ww3 from slcl1butils.symmetrize_l1b_spectra import symmetrize_xspectrum diff --git a/slcl1butils/compute/compute_from_l1b.py b/slcl1butils/compute/compute_from_l1b.py index 0ad66cf..c89c5c0 100644 --- a/slcl1butils/compute/compute_from_l1b.py +++ b/slcl1butils/compute/compute_from_l1b.py @@ -1,10 +1,11 @@ -import pdb +import logging import xarray as xr -import logging -def compute_xs_from_l1b(_file, burst_type='intra', 
time_separation='2tau')->(xr.DataArray,xr.Dataset): +def compute_xs_from_l1b( + _file, burst_type="intra", time_separation="2tau" +) -> (xr.DataArray, xr.Dataset): """ :params _file (str): full path L1B nc file @@ -18,55 +19,60 @@ def compute_xs_from_l1b(_file, burst_type='intra', time_separation='2tau')->(xr. """ # Reading the l1b file # Loading the specified burst group - if 'wv' in _file: + if "wv" in _file: ds = xr.open_dataset(_file, group=burst_type) else: - ds = xr.open_dataset(_file, group=burst_type + 'burst') + ds = xr.open_dataset(_file, group=burst_type + "burst") # ds = dt[burst_type+'burst_xspectra'].to_dataset() # drop variables - logging.debug('time_separation : %s', time_separation) - if burst_type == 'intra': + logging.debug("time_separation : %s", time_separation) + if burst_type == "intra": consolidated_list = [] - list_to_drop = ['var_xspectra_0tau', 'var_xspectra_1tau', 'var_xspectra_2tau'] + list_to_drop = ["var_xspectra_0tau", "var_xspectra_1tau", "var_xspectra_2tau"] for toto in range(0, 2): if int(time_separation[0]) != toto: - list_to_drop.append('xspectra_' + str(toto) + 'tau' + '_Re') - list_to_drop.append('xspectra_' + str(toto) + 'tau' + '_Im') + list_to_drop.append("xspectra_" + str(toto) + "tau" + "_Re") + list_to_drop.append("xspectra_" + str(toto) + "tau" + "_Im") for vv in list_to_drop: if vv not in ds: - logging.warning('%s not present in the dataset %s', vv, burst_type) + logging.warning("%s not present in the dataset %s", vv, burst_type) else: consolidated_list.append(vv) ds = ds.drop_vars(consolidated_list) else: # inter burst case pass # no variable to remove in interburst - if burst_type == 'intra' or burst_type == '': - if 'xspectra_' + time_separation + '_Re' not in ds or 'xspectra_' + time_separation + '_Im' not in ds: + if burst_type == "intra" or burst_type == "": + if ( + "xspectra_" + time_separation + "_Re" not in ds + or "xspectra_" + time_separation + "_Im" not in ds + ): xsRe = None xsIm = None else: - xsRe = 
ds['xspectra_' + time_separation + '_Re'] # +1j*ds_intra['xspectra_1tau_Im']).mean(dim=['1tau']) - xsIm = ds['xspectra_' + time_separation + '_Im'] - if time_separation == '2tau': - xsRe = xsRe.squeeze('2tau') - xsIm = xsIm.squeeze('2tau') - if time_separation == '1tau': - xsRe = xsRe.mean(dim=['1tau']) - xsIm = xsIm.mean(dim=['1tau']) - - elif burst_type == 'inter': - if 'xspectra_Re' in ds: - xsRe = ds['xspectra_Re'] # +1j*ds_inter['xspectra_Im'] - xsIm = ds['xspectra_Im'] + xsRe = ds[ + "xspectra_" + time_separation + "_Re" + ] # +1j*ds_intra['xspectra_1tau_Im']).mean(dim=['1tau']) + xsIm = ds["xspectra_" + time_separation + "_Im"] + if time_separation == "2tau": + xsRe = xsRe.squeeze("2tau") + xsIm = xsIm.squeeze("2tau") + if time_separation == "1tau": + xsRe = xsRe.mean(dim=["1tau"]) + xsIm = xsIm.mean(dim=["1tau"]) + + elif burst_type == "inter": + if "xspectra_Re" in ds: + xsRe = ds["xspectra_Re"] # +1j*ds_inter['xspectra_Im'] + xsIm = ds["xspectra_Im"] else: - logging.warning('xspectra_Re absent from interburst group') + logging.warning("xspectra_Re absent from interburst group") xsRe = None xsIm = None else: # WV case - raise Exception('not handle case') + raise Exception("not handle case") if xsRe is None: xs = None else: @@ -75,39 +81,43 @@ def compute_xs_from_l1b(_file, burst_type='intra', time_separation='2tau')->(xr. 
# xs=xs.squeeze() # convert the wavenumbers variables in range and azimuth into coordinates after selection of one unique vector without any other dimsension dependency dims_to_average = [] - if 'tile_sample' in xs.k_rg.dims: - dims_to_average.append('tile_sample') + if "tile_sample" in xs.k_rg.dims: + dims_to_average.append("tile_sample") - if 'burst' in xs.k_rg.dims: - dims_to_average.append('burst') + if "burst" in xs.k_rg.dims: + dims_to_average.append("burst") if "tile_line" in xs.k_rg.dims: - dims_to_average.append('tile_line') - xs = xs.assign_coords({'k_rg': xs.k_rg.mean(dim=dims_to_average)}) + dims_to_average.append("tile_line") + xs = xs.assign_coords({"k_rg": xs.k_rg.mean(dim=dims_to_average)}) # Replace the dimension name for frequencies - xs = xs.swap_dims({'freq_sample': 'k_rg', 'freq_line': 'k_az'}) + xs = xs.swap_dims({"freq_sample": "k_rg", "freq_line": "k_az"}) # Bug Fix to define the wavenumber in range direction. - xs.k_rg.attrs.update({'long_name': 'wavenumber in range direction', 'units': 'rad/m'}) + xs.k_rg.attrs.update( + {"long_name": "wavenumber in range direction", "units": "rad/m"} + ) return xs, ds -def compute_xs_from_l1b_wv(_file, time_separation='2tau'): +def compute_xs_from_l1b_wv(_file, time_separation="2tau"): # Reading the l1b file # Loading the specified burst group - ds = xr.open_dataset(_file, group='') + ds = xr.open_dataset(_file, group="") # ds = dt[burst_type+'burst_xspectra'].to_dataset() - xsRe = ds['xspectra_' + time_separation + '_Re'] # +1j*ds_intra['xspectra_1tau_Im']).mean(dim=['1tau']) - xsIm = ds['xspectra_' + time_separation + '_Im'] - if time_separation == '2tau': - xsRe = xsRe.squeeze('2tau') - xsIm = xsIm.squeeze('2tau') - if time_separation == '1tau': - xsRe = xsRe.mean(dim=['1tau']) - xsIm = xsIm.mean(dim=['1tau']) + xsRe = ds[ + "xspectra_" + time_separation + "_Re" + ] # +1j*ds_intra['xspectra_1tau_Im']).mean(dim=['1tau']) + xsIm = ds["xspectra_" + time_separation + "_Im"] + if time_separation == "2tau": 
+ xsRe = xsRe.squeeze("2tau") + xsIm = xsIm.squeeze("2tau") + if time_separation == "1tau": + xsRe = xsRe.mean(dim=["1tau"]) + xsIm = xsIm.mean(dim=["1tau"]) xs = xsRe + 1j * xsIm @@ -116,8 +126,10 @@ def compute_xs_from_l1b_wv(_file, time_separation='2tau'): # convert the wavenumbers variables in range and azimuth into coordinates after selection of one unique vector without any other dimsension dependency # xs = xs.assign_coords({'k_rg': xs.k_rg}) # Replace the dimnesion name for frequencies - xs = xs.swap_dims({'freq_sample': 'k_rg', 'freq_line': 'k_az'}) + xs = xs.swap_dims({"freq_sample": "k_rg", "freq_line": "k_az"}) # Bug Fix to define the wavenumber in range direction. - xs.k_rg.attrs.update({'long_name': 'wavenumber in range direction', 'units': 'rad/m'}) + xs.k_rg.attrs.update( + {"long_name": "wavenumber in range direction", "units": "rad/m"} + ) return xs, ds diff --git a/slcl1butils/compute/homogeneous_output.py b/slcl1butils/compute/homogeneous_output.py index ea1f13f..305e404 100644 --- a/slcl1butils/compute/homogeneous_output.py +++ b/slcl1butils/compute/homogeneous_output.py @@ -1,11 +1,14 @@ -import pdb +import logging -from slcl1butils.get_config import get_conf -import xarray as xr import numpy as np -import logging +import xarray as xr + +from slcl1butils.get_config import get_conf + conf = get_conf() -def add_missing_variables(ds_intra,ds_inter)->(xr.Dataset,xr.Dataset): + + +def add_missing_variables(ds_intra, ds_inter) -> (xr.Dataset, xr.Dataset): """ Parameters @@ -19,11 +22,11 @@ def add_missing_variables(ds_intra,ds_inter)->(xr.Dataset,xr.Dataset): ds_inter (xr.Dataset) """ - for vv in conf['list_variables_expected_intra']: + for vv in conf["list_variables_expected_intra"]: if vv not in ds_intra: - attrs = conf['list_variables_expected_intra'][vv]['attrs'] + attrs = conf["list_variables_expected_intra"][vv]["attrs"] # dims_name = conf['list_variables_expected_intra'][vv]['dims'] - coords_name = 
conf['list_variables_expected_intra'][vv]['coords'] + coords_name = conf["list_variables_expected_intra"][vv]["coords"] # dims = {} # for di in dims_name: # if di in ds_intra.dims: @@ -39,13 +42,15 @@ def add_missing_variables(ds_intra,ds_inter)->(xr.Dataset,xr.Dataset): else: coords[co] = 1 dims = [co for co in coords.keys()] - ds_intra = ds_intra.assign({vv:xr.DataArray(coords=coords,dims=dims,attrs=attrs)}) - logging.info('empty %s added to intra',vv) - for vv in conf['list_variables_expected_inter']: + ds_intra = ds_intra.assign( + {vv: xr.DataArray(coords=coords, dims=dims, attrs=attrs)} + ) + logging.info("empty %s added to intra", vv) + for vv in conf["list_variables_expected_inter"]: if vv not in ds_inter: - attrs = conf['list_variables_expected_intra'][vv]['attrs'] - dims_name = conf['list_variables_expected_intra'][vv]['dims'] - coords_name = conf['list_variables_expected_intra'][vv]['coords'] + attrs = conf["list_variables_expected_intra"][vv]["attrs"] + # dims_name = conf["list_variables_expected_intra"][vv]["dims"] + coords_name = conf["list_variables_expected_intra"][vv]["coords"] # dims = {} # for di in dims_name: # if di in ds_inter.dims: @@ -59,6 +64,8 @@ def add_missing_variables(ds_intra,ds_inter)->(xr.Dataset,xr.Dataset): else: coords[co] = 1 dims = [co for co in coords.keys()] - ds_inter = ds_inter.assign({vv:xr.DataArray(coords=coords,dims=dims,attrs=attrs)}) - logging.info('empty %s added to intra',vv) - return ds_intra,ds_inter \ No newline at end of file + ds_inter = ds_inter.assign( + {vv: xr.DataArray(coords=coords, dims=dims, attrs=attrs)} + ) + logging.info("empty %s added to intra", vv) + return ds_intra, ds_inter diff --git a/slcl1butils/compute/stack_iw_l1b.py b/slcl1butils/compute/stack_iw_l1b.py index 3aed35a..c05d0df 100644 --- a/slcl1butils/compute/stack_iw_l1b.py +++ b/slcl1butils/compute/stack_iw_l1b.py @@ -1,7 +1,8 @@ -import xarray as xr import logging -import numpy as np import os + +import numpy as np +import xarray as xr 
from tqdm import tqdm diff --git a/slcl1butils/compute/stack_wv_l1c_monthly.py b/slcl1butils/compute/stack_wv_l1c_monthly.py index a4701fd..fd58651 100644 --- a/slcl1butils/compute/stack_wv_l1c_monthly.py +++ b/slcl1butils/compute/stack_wv_l1c_monthly.py @@ -3,111 +3,113 @@ A Grouazel Stack WV L1C data per month for stats and training purpose """ -import pdb - -import xarray as xr +import argparse +import datetime import glob import logging -import shutil -import numpy as np import os -import datetime +import shutil import time -import argparse -from slcl1butils.utils import get_memory_usage -from slcl1butils.get_config import get_conf + +import numpy as np +import xarray as xr from dateutil import relativedelta from tqdm import tqdm + +from slcl1butils.get_config import get_conf +from slcl1butils.utils import get_memory_usage + conf = get_conf() -GROUP_NAME_SAR = 'sar_sentinel1_wv_l1b_xsp_intraburst' -VARS_WW3_GRID = ['MAPSTA', - 'dpt', - 'ucur', - 'vcur', - 'uwnd', - 'vwnd', - 'ice', - 'ibg', - 'ic1', - 'ic5', - 'hs', - 'lm', - 't02', - 't0m1', - 't01', - 'fp', - 'dir', - 'spr', - 'dp', - 'phs0', - 'phs1', - 'phs2', - 'phs3', - 'phs4', - 'phs5', - 'ptp0', - 'ptp1', - 'ptp2', - 'ptp3', - 'ptp4', - 'ptp5', - 'plp0', - 'plp1', - 'plp2', - 'plp3', - 'plp4', - 'plp5', - 'pdir0', - 'pdir1', - 'pdir2', - 'pdir3', - 'pdir4', - 'pdir5', - 'pspr0', - 'pspr1', - 'pspr2', - 'pspr3', - 'pspr4', - 'pspr5', - 'pws0', - 'pws1', - 'pws2', - 'pws3', - 'pws4', - 'pws5', - 'pdp0', - 'pdp1', - 'pdp2', - 'pdp3', - 'pdp4', - 'pdp5', - 'tws', - 'uust', - 'vust', - 'cha', - 'cge', - 'faw', - 'utaw', - 'vtaw', - 'utwa', - 'vtwa', - 'wcc', - 'utwo', - 'vtwo', - 'foc', - 'utus', - 'vtus', - 'uuss', - 'vuss', - 'uabr', - 'vabr', - 'uubr', - 'vubr', - 'mssu', - 'mssc', - 'mssd'] -VARS_ECMF = ["U10", - "V10"] +GROUP_NAME_SAR = "sar_sentinel1_wv_l1b_xsp_intraburst" +VARS_WW3_GRID = [ + "MAPSTA", + "dpt", + "ucur", + "vcur", + "uwnd", + "vwnd", + "ice", + "ibg", + "ic1", + "ic5", + "hs", + "lm", 
+ "t02", + "t0m1", + "t01", + "fp", + "dir", + "spr", + "dp", + "phs0", + "phs1", + "phs2", + "phs3", + "phs4", + "phs5", + "ptp0", + "ptp1", + "ptp2", + "ptp3", + "ptp4", + "ptp5", + "plp0", + "plp1", + "plp2", + "plp3", + "plp4", + "plp5", + "pdir0", + "pdir1", + "pdir2", + "pdir3", + "pdir4", + "pdir5", + "pspr0", + "pspr1", + "pspr2", + "pspr3", + "pspr4", + "pspr5", + "pws0", + "pws1", + "pws2", + "pws3", + "pws4", + "pws5", + "pdp0", + "pdp1", + "pdp2", + "pdp3", + "pdp4", + "pdp5", + "tws", + "uust", + "vust", + "cha", + "cge", + "faw", + "utaw", + "vtaw", + "utwa", + "vtwa", + "wcc", + "utwo", + "vtwo", + "foc", + "utus", + "vtus", + "uuss", + "vuss", + "uabr", + "vabr", + "uubr", + "vubr", + "mssu", + "mssc", + "mssd", +] +VARS_ECMF = ["U10", "V10"] VARS_SAR_L1B = [ "incidence", "ground_heading", @@ -528,32 +530,30 @@ def preprrocess( tmpdate = datetime.datetime.strptime( dsu.attrs["start_date"], "%Y-%m-%d %H:%M:%S.%f" ) - except: + except ValueError: tmpdate = datetime.datetime.strptime( dsu.attrs["start_date"], "%Y-%m-%d %H:%M:%S" ) # dsu['sardate'] = xr.DataArray(tmpdate) - if ( - "freq_sample" in dsu.coords - ): # when WV L1C are not symmetrized (no WW3 spectra addition) ie old version of the L1B->L1C processor - dsu = dsu.isel( - freq_sample=slice(0, indkrg_up), freq_line=slice(indkaz_bo, indkaz_up) - ) - else: - ind0_rg = get_k0(dsu['k_rg']) - ind45_rg = get_k45m(dsu['k_rg']) - distk0_to_k45 = (ind45_rg-ind0_rg) - borninf = ind0_rg-distk0_to_k45 - - ind0_az = get_k0(dsu['k_az']) - ind45_az = get_k45m(dsu['k_az']) - distk0_to_k45_az = (ind45_az-ind0_az) - borninf_az = ind0_az-distk0_to_k45_az - - dsu = dsu.isel( - k_rg=slice(borninf,ind45_rg), k_az=slice(borninf_az, ind45_az) - ) - logging.info('cropped cross spectra : size / %i x %i',dsu.k_rg.size,dsu.k_az.size) + # if ( + # "freq_sample" in dsu.coords + # ): # when WV L1C are not symmetrized (no WW3 spectra addition) ie old version of the L1B->L1C processor + # dsu = dsu.isel( + # 
freq_sample=slice(0, indkrg_up), freq_line=slice(indkaz_bo, indkaz_up) + # ) + # else: + ind0_rg = get_k0(dsu["k_rg"]) + ind45_rg = get_k45m(dsu["k_rg"]) + distk0_to_k45 = ind45_rg - ind0_rg + borninf = ind0_rg - distk0_to_k45 + + ind0_az = get_k0(dsu["k_az"]) + ind45_az = get_k45m(dsu["k_az"]) + distk0_to_k45_az = ind45_az - ind0_az + borninf_az = ind0_az - distk0_to_k45_az + + dsu = dsu.isel(k_rg=slice(borninf, ind45_rg), k_az=slice(borninf_az, ind45_az)) + logging.info("cropped cross spectra : size / %i x %i", dsu.k_rg.size, dsu.k_az.size) dsu = dsu.assign_coords({"k_rg": k_rg_ref.values, "k_az": k_az_ref.values}) dsu = dsu.assign_coords({"nc_number": nc_number}) dsu = dsu.assign_coords( @@ -566,11 +566,11 @@ def preprrocess( + 1j * dsu["xspectra_%stau_Im" % tautau] ) dsu = dsu.drop(["xspectra_%stau_Re" % tautau, "xspectra_%stau_Im" % tautau]) - dsu = dsu.drop('line') + dsu = dsu.drop("line") return dsu -def get_reference_wavenumbers(ds,indkaz_bo=None,indkaz_up=None): +def get_reference_wavenumbers(ds, indkaz_bo=None, indkaz_up=None): """ Parameters @@ -596,16 +596,15 @@ def get_reference_wavenumbers(ds,indkaz_bo=None,indkaz_up=None): else: # new version L1C WV symmetrized with WW3 sp ind0ref_rg = get_k0(k_rg_ref_symm) ind45ref_rg = get_k45m(k_rg_ref_symm) - distk0_to_k45 = (ind45ref_rg-ind0ref_rg) - borninf = ind0ref_rg-distk0_to_k45 - k_rg_ref = k_rg_ref_symm.isel({'k_rg':slice(borninf,ind45ref_rg)}) - + distk0_to_k45 = ind45ref_rg - ind0ref_rg + borninf = ind0ref_rg - distk0_to_k45 + k_rg_ref = k_rg_ref_symm.isel({"k_rg": slice(borninf, ind45ref_rg)}) ind0ref_az = get_k0(k_az) ind45ref_az = get_k45m(k_az) - distk0_to_k45_az = (ind45ref_az-ind0ref_az) - borninf = ind0ref_az-distk0_to_k45_az - k_az_ref = k_az.isel({'k_az':slice(borninf,ind45ref_az)}) + distk0_to_k45_az = ind45ref_az - ind0ref_az + borninf = ind0ref_az - distk0_to_k45_az + k_az_ref = k_az.isel({"k_az": slice(borninf, ind45ref_az)}) # k_az_ref = k_az.isel(k_az=slice(indkaz_bo, indkaz_up)) 
k_az_ref = k_az_ref.assign_coords({"k_az": np.arange(len(k_az_ref))}) k_az_ref = xr.DataArray( @@ -634,11 +633,13 @@ def get_index_wavenumbers(ds): indkaz_bo = np.argmin(abs(ds["k_az"].data + val_k_az)) return indkaz_bo, indkaz_up, indkrg_up, indkrg_bo + def get_k0(k): - return np.where(k.data==0.)[0][0] + return np.where(k.data == 0.0)[0][0] + def get_k45m(k): - return np.argmin(abs(k.data-0.1396)) + return np.argmin(abs(k.data - 0.1396)) def stack_wv_l1c_per_month(list_SAFEs, dev=False, keep_xspectrum=False): @@ -666,7 +667,7 @@ def stack_wv_l1c_per_month(list_SAFEs, dev=False, keep_xspectrum=False): if len(list_SAFEs) > 0: # for safe_xsp in list_SAFEs: - logging.info('loop over the SAFE : %i',len(list_SAFEs)) + logging.info("loop over the SAFE : %i", len(list_SAFEs)) for ssi in tqdm(range(len(list_SAFEs))): safe_xsp = list_SAFEs[ssi] lst_nc_l1c = glob.glob(os.path.join(safe_xsp, "*nc")) @@ -674,19 +675,20 @@ def stack_wv_l1c_per_month(list_SAFEs, dev=False, keep_xspectrum=False): if len(lst_nc_l1c) == 0: logging.warning("empty safe: %s", safe_xsp) else: - if ssi==0: + if ssi == 0: dt = xr.open_datatree(lst_nc_l1c[0]) vL1C = dt.attrs["L1C_product_version"] vL1B = dt.attrs["L1B_product_version"] - if keep_xspectrum is True and k_rg_ref is None: # code below executed only once thanks to this line + if ( + keep_xspectrum is True and k_rg_ref is None + ): # code below executed only once thanks to this line # dsfirst = xr.open_dataset(lst_nc_l1c[0], group='intraburst', engine='netcdf4', cache=False) dt = xr.open_datatree(lst_nc_l1c[0]) dsfirst = dt["intraburst"].to_dataset() # indkaz_bo, indkaz_up, indkrg_up, indkrg_bo = get_index_wavenumbers( # dsfirst # ) - k_rg_ref, k_az_ref = get_reference_wavenumbers( - dsfirst) + k_rg_ref, k_az_ref = get_reference_wavenumbers(dsfirst) for ff in lst_nc_l1c: if keep_xspectrum: tmpds = preprrocess( @@ -698,21 +700,31 @@ def stack_wv_l1c_per_month(list_SAFEs, dev=False, keep_xspectrum=False): ) else: tmpds = xr.open_dataset(ff, 
group="intraburst") - vars_to_drop = ['xspectra_2tau_Re','xspectra_2tau_Im','efth', - 'k_rg','k_az','nc_number'] # for foundation model exercise + vars_to_drop = [ + "xspectra_2tau_Re", + "xspectra_2tau_Im", + "efth", + "k_rg", + "k_az", + "nc_number", + ] # for foundation model exercise vars_to_drop_consolidated = [] for vv in tmpds: if vv not in VARS_TO_KEEP and vv not in vars_to_drop: vars_to_drop_consolidated.append(vv) - tmpds = tmpds.drop_vars(vars_to_drop_consolidated) # to avoid all the WW3 grid variables that are not mandatory for Hs regression or spectrum regression - tmpds = tmpds.drop_dims(['k_rg','k_az']) + tmpds = tmpds.drop_vars( + vars_to_drop_consolidated + ) # to avoid all the WW3 grid variables that are not mandatory for Hs regression or spectrum regression + tmpds = tmpds.drop_dims(["k_rg", "k_az"]) tmpsubswath = xr.DataArray( int(os.path.basename(ff).split("-")[1].replace("wv", "")) ) # wv1 -> , wv2 -> 2 # tmpds = tmpds.assign_coords({'wvmode': tmpsubswath}) tmpds["wvmode"] = xr.DataArray(tmpsubswath) - tmpds['wvmode'].attrs = {'description':'sub-swath of the Sentinel-1 SAR WV: wv1 or wv2'} + tmpds["wvmode"].attrs = { + "description": "sub-swath of the Sentinel-1 SAR WV: wv1 or wv2" + } tmpdate = datetime.datetime.strptime( tmpds.attrs["start_date"], "%Y-%m-%d %H:%M:%S.%f" ) @@ -720,11 +732,11 @@ def stack_wv_l1c_per_month(list_SAFEs, dev=False, keep_xspectrum=False): tmpds = tmpds.drop( [vv for vv in tmpds if "xspectra" in vv] + ["sample"] ) - if 'freq_line' in tmpds: + if "freq_line" in tmpds: tmpds = tmpds.drop_dims(["freq_line", "freq_sample"]) - tmpds.drop_vars(['corner_longitude','corner_latitude']) - tmpds = tmpds.drop_dims(['c_sample','c_line']) - tmpds = tmpds.drop('line') + tmpds.drop_vars(["corner_longitude", "corner_latitude"]) + tmpds = tmpds.drop_dims(["c_sample", "c_line"]) + tmpds = tmpds.drop("line") # tmpds = tmpds.assign_coords({"nc_number": cpt_nc}) tmpds = tmpds.assign_coords({"sardate": tmpdate}) # tmpds = 
tmpds.expand_dims('nc_number') @@ -733,7 +745,7 @@ def stack_wv_l1c_per_month(list_SAFEs, dev=False, keep_xspectrum=False): logging.info("nb dataset to combine: %s", cpt_nc) # ds = xr.concat(tmp, dim="sardate",coords='minimal') # ne fonctionne pas -> conflict in coords - ds = xr.combine_by_coords(tmp,combine_attrs="drop_conflicts") + ds = xr.combine_by_coords(tmp, combine_attrs="drop_conflicts") # # la ligne du dessous marche mais cest long je tente autre chsoe # ds = xr.combine_by_coords( @@ -746,27 +758,47 @@ def stack_wv_l1c_per_month(list_SAFEs, dev=False, keep_xspectrum=False): for vv in VARS_ECMF: if vv in ds: ds_ecmwf[vv] = ds[vv] - if 'U10' in ds_ecmwf: - ds_ecmwf['U10'].attrs['description'] = 'zonal component of wind speed from ECMWF hourly 0.1deg resolution forecasts' - ds_ecmwf['V10'].attrs['description'] = 'meridional component of wind speed from ECMWF hourly 0.1deg resolution forecasts' - ds_ecmwf['longitude'].attrs['description'] = 'longitude of the center of level-1B tile from Sentinel-1 WV imagette' - ds_ecmwf['latitude'].attrs['description'] = 'latitude of the center of level-1B tile from Sentinel-1 WV imagette' + if "U10" in ds_ecmwf: + ds_ecmwf["U10"].attrs[ + "description" + ] = "zonal component of wind speed from ECMWF hourly 0.1deg resolution forecasts" + ds_ecmwf["V10"].attrs[ + "description" + ] = "meridional component of wind speed from ECMWF hourly 0.1deg resolution forecasts" + ds_ecmwf["longitude"].attrs[ + "description" + ] = "longitude of the center of level-1B tile from Sentinel-1 WV imagette" + ds_ecmwf["latitude"].attrs[ + "description" + ] = "latitude of the center of level-1B tile from Sentinel-1 WV imagette" for vv in VARS_WW3_GRID: if vv in ds: ds_ww3[vv] = ds[vv] - ds_ww3[vv].attrs['source'] = 'WW3 numerical forecast 3-hourly 0.5 deg resolution' - if 'longitude' in ds_ww3: - ds_ww3['longitude'].attrs['description'] = 'longitude of the center of level-1B tile from Sentinel-1 WV imagette' - ds_ww3['latitude'].attrs['description'] 
= 'latitude of the center of level-1B tile from Sentinel-1 WV imagette' + ds_ww3[vv].attrs[ + "source" + ] = "WW3 numerical forecast 3-hourly 0.5 deg resolution" + if "longitude" in ds_ww3: + ds_ww3["longitude"].attrs[ + "description" + ] = "longitude of the center of level-1B tile from Sentinel-1 WV imagette" + ds_ww3["latitude"].attrs[ + "description" + ] = "latitude of the center of level-1B tile from Sentinel-1 WV imagette" for vv in VARS_SAR_L1B: if vv in ds: ds_sar_l1b[vv] = ds[vv] - ds_sar_l1b[vv].attrs['source'] = 'SAR Ifremer level-1B processor for WV SLC' - ds_sar_l1b['longitude'].attrs['description'] = 'longitude of the center of level-1B tile from Sentinel-1 WV imagette' - ds_sar_l1b['latitude'].attrs['description'] = 'latitude of the center of level-1B tile from Sentinel-1 WV imagette' - node_ecm = xr.DataTree(name="ecmwf_0100_1h", parent=dtnew, data=ds_ecmwf) - node_ww3 = xr.DataTree(name="ww3_global_yearly_3h", parent=dtnew, data=ds_ww3) - node_l1b = xr.DataTree(name=GROUP_NAME_SAR, parent=dtnew, data=ds_sar_l1b) + ds_sar_l1b[vv].attrs[ + "source" + ] = "SAR Ifremer level-1B processor for WV SLC" + ds_sar_l1b["longitude"].attrs[ + "description" + ] = "longitude of the center of level-1B tile from Sentinel-1 WV imagette" + ds_sar_l1b["latitude"].attrs[ + "description" + ] = "latitude of the center of level-1B tile from Sentinel-1 WV imagette" + _ = xr.DataTree(name="ecmwf_0100_1h", parent=dtnew, data=ds_ecmwf) + _ = xr.DataTree(name="ww3_global_yearly_3h", parent=dtnew, data=ds_ww3) + _ = xr.DataTree(name=GROUP_NAME_SAR, parent=dtnew, data=ds_sar_l1b) # dt['ecmwf_0100_1h'] = ds_ecmwf # dt['ww3_global_yearly_3h'] = ds_ww3 # dt['sentinel1_wv_l1b'] = ds_sar_l1b @@ -794,6 +826,8 @@ def save_to_zarr(stacked_ds, outputfile, L1C_version, L1B_version): stacked_ds.to_zarr(outputfile, mode="w") logging.info("successfully wrote stacked L1C %s", outputfile) return outputfile + + def save_to_netcdf(stackdt, outputfile, L1C_version, L1B_version): """ @@ -813,7 
+847,7 @@ def save_to_netcdf(stackdt, outputfile, L1C_version, L1B_version): stackdt.attrs["creation_script"] = os.path.basename(__file__) logging.info("start saving to disk") # stackdt.to_netcdf(outputfile, engine='h5netcdf',mode="w") - stackdt.to_netcdf(outputfile, engine='netcdf4',mode="w") + stackdt.to_netcdf(outputfile, engine="netcdf4", mode="w") logging.info("successfully wrote stacked L1C %s", outputfile) return outputfile @@ -853,7 +887,11 @@ def main(): required=True, help="directory where L1C stacked files will be stored", ) - parser.add_argument('--finalarchive',help='the files are produce in args.outputdir and then moved in args.finalarchive (using mv)',required=True) + parser.add_argument( + "--finalarchive", + help="the files are produce in args.outputdir and then moved in args.finalarchive (using mv)", + required=True, + ) parser.add_argument( "--dev", action="store_true", @@ -898,7 +936,6 @@ def main(): if os.path.exists(outputfile) and args.overwrite is False: logging.info("%s already exists", outputfile) else: - lst_safes_month = get_all_l1c_for_a_month( startdate, stopdate, @@ -916,17 +953,19 @@ def main(): # L1C_version=vL1C, # L1B_version=vL1B, # ) - outputfile = save_to_netcdf(stackdt=stackdt, + outputfile = save_to_netcdf( + stackdt=stackdt, outputfile=outputfile, L1C_version=vL1C, - L1B_version=vL1B) + L1B_version=vL1B, + ) else: logging.info("no data available") if os.path.exists(outputfile): - archive_path = outputfile.replace(args.outputdir,args.finalarchive) - if outputfile!=archive_path: - logging.info('move file %s to %s',outputfile,archive_path) - shutil.move(outputfile,archive_path) + archive_path = outputfile.replace(args.outputdir, args.finalarchive) + if outputfile != archive_path: + logging.info("move file %s to %s", outputfile, archive_path) + shutil.move(outputfile, archive_path) logging.info("peak memory usage: %s Mbytes", get_memory_usage()) logging.info("done in %1.3f min", (time.time() - t0) / 60.0) diff --git 
a/slcl1butils/conversion_polar_cartesian.py b/slcl1butils/conversion_polar_cartesian.py index 0a07c21..54a4547 100644 --- a/slcl1butils/conversion_polar_cartesian.py +++ b/slcl1butils/conversion_polar_cartesian.py @@ -1,7 +1,10 @@ +import logging + import numpy as np import xarray as xr -import logging -def from_xCartesianSpectrum(ds, *, Nphi=120, ksampling='log',keep_nan=True, **kwargs): + + +def from_xCartesianSpectrum(ds, *, Nphi=120, ksampling="log", keep_nan=True, **kwargs): """ Convert a xCartesianSpectrum into a xPolarSpectrum. Grid of the return spectrum depends on the keywords arguments If 'size' and 'dr' are provided as keyword arguments, they are used prioritary. Otherwise, kmax and dk are used instead @@ -17,36 +20,46 @@ def from_xCartesianSpectrum(ds, *, Nphi=120, ksampling='log',keep_nan=True, **kw (xarray) : xPolarSpectrum """ - kmin = max(np.sort(np.abs(ds.kx))[1], np.sort(np.abs(ds.ky))[1]) # first element is zero - kmax = min(np.sort(np.abs(ds.kx))[-3], np.sort(np.abs(ds.ky))[-3]) # if last one is used, interpolation do not work well + kmin = max( + np.sort(np.abs(ds.kx))[1], np.sort(np.abs(ds.ky))[1] + ) # first element is zero + kmax = min( + np.sort(np.abs(ds.kx))[-3], np.sort(np.abs(ds.ky))[-3] + ) # if last one is used, interpolation do not work well - kwargs.update({'kmin':kmin, 'kmax':kmax, 'ksampling':ksampling, 'Nphi':Nphi}) - logging.debug('kwargs %s',kwargs) - if 'Nk' in kwargs: + kwargs.update({"kmin": kmin, "kmax": kmax, "ksampling": ksampling, "Nphi": Nphi}) + logging.debug("kwargs %s", kwargs) + if "Nk" in kwargs: k, dk, phi, dphi = SpectrumPolarGrid(**kwargs) - elif 'k' in kwargs: #add agrouaze - #logging.debug('second way to define k') - k = kwargs['k'] + elif "k" in kwargs: # add agrouaze + # logging.debug('second way to define k') + k = kwargs["k"] Nk = 60 dk = np.log(10) * (np.log10(kmax) - np.log10(kmin)) / (Nk - 1) * k - _,_,phi,dphi = SpectrumPolarGrid(**kwargs) + _, _, phi, dphi = SpectrumPolarGrid(**kwargs) else: - raise 
Exception('impossible to define the spectrum grid') - k = xr.DataArray(k, dims='k') - phi = xr.DataArray(phi, dims='phi') + raise Exception("impossible to define the spectrum grid") + k = xr.DataArray(k, dims="k") + phi = xr.DataArray(phi, dims="phi") - kx = k*np.cos(phi) - ky = k*np.sin(phi) + kx = k * np.cos(phi) + ky = k * np.sin(phi) if keep_nan: - myspec= ds.interp(kx=kx, ky=ky).assign_coords(k=k, phi=phi) + myspec = ds.interp(kx=kx, ky=ky).assign_coords(k=k, phi=phi) else: - myspec = ds.interp(kx=kx, ky=ky, kwargs={"fill_value": None}).assign_coords(k=k, phi=phi) # correction Grouazel May 2022 to avoid NaN in polar spectrum - myspec = myspec.assign_coords({'dk':xr.DataArray(dk, dims='k')}) if ksampling=='log' else myspec.assign_coords({'dk':xr.DataArray(dk)}) - myspec.attrs.update({'dphi':dphi}) - myspec.attrs.pop('dkx', None) - myspec.attrs.pop('dky', None) + myspec = ds.interp(kx=kx, ky=ky, kwargs={"fill_value": None}).assign_coords( + k=k, phi=phi + ) # correction Grouazel May 2022 to avoid NaN in polar spectrum + myspec = ( + myspec.assign_coords({"dk": xr.DataArray(dk, dims="k")}) + if ksampling == "log" + else myspec.assign_coords({"dk": xr.DataArray(dk)}) + ) + myspec.attrs.update({"dphi": dphi}) + myspec.attrs.pop("dkx", None) + myspec.attrs.pop("dky", None) - return myspec.drop(('kx','ky')) + return myspec.drop(("kx", "ky")) def SpectrumPolarGrid(**kwargs): @@ -80,24 +93,29 @@ def SpectrumPolarGrid(**kwargs): 'lin' : a linear sampling is applied """ - Nk = kwargs.pop('Nk', 400) - Nphi = kwargs.pop('Nphi', 120) - kmin = kwargs.pop('kmin', 2*np.pi/800.) - kmax = kwargs.pop('kmax', 4000.) 
- ksampling = kwargs.pop('ksampling', 'log') + Nk = kwargs.pop("Nk", 400) + Nphi = kwargs.pop("Nphi", 120) + kmin = kwargs.pop("kmin", 2 * np.pi / 800.0) + kmax = kwargs.pop("kmax", 4000.0) + ksampling = kwargs.pop("ksampling", "log") - #--------------Wavenumber initialization--------------------------- + # --------------Wavenumber initialization--------------------------- - if ksampling == 'log': - k = np.logspace(np.log10(kmin), np.log10(kmax),Nk) - dk = np.log(10)*(np.log10(kmax)-np.log10(kmin))/(Nk-1)*k - elif ksampling == 'lin': - k = np.linspace(kmin, kmax,Nk) + if ksampling == "log": + k = np.logspace(np.log10(kmin), np.log10(kmax), Nk) + dk = np.log(10) * (np.log10(kmax) - np.log10(kmin)) / (Nk - 1) * k + elif ksampling == "lin": + k = np.linspace(kmin, kmax, Nk) # dk = (kmax-kmin)/(Nk-1)*np.ones(k.shape) - dk = (kmax-kmin)/(Nk-1) + dk = (kmax - kmin) / (Nk - 1) else: - raise ValueError('Unknown k sampling method') + raise ValueError("Unknown k sampling method") - dphi = 2*np.pi/Nphi + dphi = 2 * np.pi / Nphi phi = np.arange(-np.pi, np.pi, dphi) - return k,dk, phi,dphi #, 'km':km, 'tm':tm, 'Nk':Nk, 'Nphi':Nphi, 'kmin':kmin, 'kmax':kmax, 'ksampling':ksampling \ No newline at end of file + return ( + k, + dk, + phi, + dphi, + ) # , 'km':km, 'tm':tm, 'Nk':Nk, 'Nphi':Nphi, 'kmin':kmin, 'kmax':kmax, 'ksampling':ksampling diff --git a/slcl1butils/get_config.py b/slcl1butils/get_config.py index f9547d1..07aaab5 100644 --- a/slcl1butils/get_config.py +++ b/slcl1butils/get_config.py @@ -1,8 +1,11 @@ -from yaml import load -import slcl1butils import logging import os + from yaml import CLoader as Loader +from yaml import load + +import slcl1butils + def get_conf(): """ @@ -10,15 +13,17 @@ def get_conf(): Returns: conf: dict """ - local_config_pontential_path = os.path.join(os.path.dirname(slcl1butils.__file__), 'localconfig.yaml') + local_config_pontential_path = os.path.join( + os.path.dirname(slcl1butils.__file__), "localconfig.yaml" + ) if 
os.path.exists(local_config_pontential_path): - logging.debug('local config used') + logging.debug("local config used") config_path = local_config_pontential_path else: - logging.debug('default config used') - config_path = os.path.join(os.path.dirname(slcl1butils.__file__), 'config.yaml') - logging.debug('config path: %s',config_path) - stream = open(config_path, 'r') + logging.debug("default config used") + config_path = os.path.join(os.path.dirname(slcl1butils.__file__), "config.yaml") + logging.debug("config path: %s", config_path) + stream = open(config_path, "r") conf = load(stream, Loader=Loader) return conf diff --git a/slcl1butils/get_polygons_from_l1b.py b/slcl1butils/get_polygons_from_l1b.py index 93fc04a..eb677a7 100644 --- a/slcl1butils/get_polygons_from_l1b.py +++ b/slcl1butils/get_polygons_from_l1b.py @@ -1,15 +1,18 @@ +import logging + #!/usr/bin/env python import numpy as np -from shapely import geometry -from shapely import wkt import xarray as xr -import logging +from shapely import geometry, wkt + from slcl1butils.utils import xndindex -polygons_varnames = ['swath', 'tiles', 'bursts'] +polygons_varnames = ["swath", "tiles", "bursts"] -def get_swath_tiles_polygons_from_l1bgroup(l1b_ds,polarization, swath_only=False, ik=0, burst_type='intra', **kwargs): +def get_swath_tiles_polygons_from_l1bgroup( + l1b_ds, polarization, swath_only=False, ik=0, burst_type="intra", **kwargs +): """ get polygons for a given group of L1B SAR IFREMER product Args: @@ -31,48 +34,65 @@ def get_swath_tiles_polygons_from_l1bgroup(l1b_ds,polarization, swath_only=False poly_bursts = [] itile_samples = [] itile_lines = [] - variable_names = kwargs.get('variable_names', None) + variable_names = kwargs.get("variable_names", None) if variable_names: if variable_names[0] is not None: for variable_name in variable_names: variables[variable_name] = [] # Swath_polynom - poly_swath = wkt.loads(l1b_ds.attrs['footprint']) - polygons['swath'] = [poly_swath] + poly_swath = 
wkt.loads(l1b_ds.attrs["footprint"]) + polygons["swath"] = [poly_swath] if swath_only: return polygons, coordinates, variables - logging.debug('l1b_ds : %s', l1b_ds) + logging.debug("l1b_ds : %s", l1b_ds) # Tiles polynom & variables sizes_without_pol = {} - for ss in l1b_ds['sigma0'].sizes: - if ss != 'pol': - sizes_without_pol[ss] = l1b_ds['sigma0'].sizes[ss] - indexX = xndindex(l1b_ds['sigma0'].sizes) + for ss in l1b_ds["sigma0"].sizes: + if ss != "pol": + sizes_without_pol[ss] = l1b_ds["sigma0"].sizes[ss] + indexX = xndindex(l1b_ds["sigma0"].sizes) gege = xndindex( - sizes_without_pol) # whatever tthere is or not tile_line and tile_sample and burst, it loops on it + sizes_without_pol + ) # whatever tthere is or not tile_line and tile_sample and burst, it loops on it for uu in gege: # iburst = uu['burst'] - if 'tile_line' in uu: - itile_line = uu['tile_line'] + if "tile_line" in uu: + itile_line = uu["tile_line"] else: itile_line = np.nan - if 'tile_sample' in uu: - itile_sample = uu['tile_sample'] + if "tile_sample" in uu: + itile_sample = uu["tile_sample"] else: itile_sample = np.nan # for iburst in l1b_ds['burst'].values: - if burst_type == 'intra': - if 'burst_corner_longitude' in l1b_ds: - burst_lon_corners = l1b_ds['burst_corner_longitude'].sel(tile_line=itile_line).values.flatten().tolist() - burst_lat_corners = l1b_ds['burst_corner_latitude'].sel(tile_line=itile_line).values.flatten().tolist() + if burst_type == "intra": + if "burst_corner_longitude" in l1b_ds: + burst_lon_corners = ( + l1b_ds["burst_corner_longitude"] + .sel(tile_line=itile_line) + .values.flatten() + .tolist() + ) + burst_lat_corners = ( + l1b_ds["burst_corner_latitude"] + .sel(tile_line=itile_line) + .values.flatten() + .tolist() + ) if np.sum((~np.isfinite(burst_lon_corners))) == 0: # Define the burst polygons order = [0, 1, 3, 2, 0] poly_burst = geometry.Polygon( - np.stack([np.array(burst_lon_corners)[order], np.array(burst_lat_corners)[order]]).T) + np.stack( + [ + 
np.array(burst_lon_corners)[order], + np.array(burst_lat_corners)[order], + ] + ).T + ) # pts_burst = [geometry.Point(burst_lon_corners[cpt],burst_lat_corners[cpt]) for cpt,_ in enumerate(burst_lon_corners)] # pts_burst = [pts_burst[0], pts_burst[1], pts_burst[3], pts_burst[2]] # poly_burst = geometry.Polygon(pts_burst) @@ -84,19 +104,24 @@ def get_swath_tiles_polygons_from_l1bgroup(l1b_ds,polarization, swath_only=False # tile_line=itile_line).values.flatten().tolist() # lat_corners = l1b_ds['corner_latitude'].sel(burst=iburst, tile_sample=itile_sample, # tile_line=itile_line).values.flatten().tolist() - lon_corners = l1b_ds['corner_longitude'].sel(uu).values.flatten().tolist() - lat_corners = l1b_ds['corner_latitude'].sel(uu).values.flatten().tolist() + lon_corners = l1b_ds["corner_longitude"].sel(uu).values.flatten().tolist() + lat_corners = l1b_ds["corner_latitude"].sel(uu).values.flatten().tolist() # Check on the lon/lat corners validity # print(np.sum((~np.isfinite(lon_corners)))) if np.sum((~np.isfinite(lon_corners))) == 0: # Define the tile polygons - pts = [geometry.Point(lon_corners[cpt], lat_corners[cpt]) for cpt, _ in enumerate(lon_corners)] + pts = [ + geometry.Point(lon_corners[cpt], lat_corners[cpt]) + for cpt, _ in enumerate(lon_corners) + ] pts = [pts[0], pts[1], pts[3], pts[2], pts[0]] - logging.debug('pts: %s', pts) - logging.debug('one pt : %s %s', pts[0], type(pts[0])) + logging.debug("pts: %s", pts) + logging.debug("one pt : %s %s", pts[0], type(pts[0])) order = [0, 1, 3, 2, 0] - poly_tile = geometry.Polygon(np.stack([np.array(lon_corners)[order], np.array(lat_corners)[order]]).T) + poly_tile = geometry.Polygon( + np.stack([np.array(lon_corners)[order], np.array(lat_corners)[order]]).T + ) # poly_tile = geometry.Polygon(pts) poly_tiles.append(poly_tile) # Coordinates @@ -104,49 +129,49 @@ def get_swath_tiles_polygons_from_l1bgroup(l1b_ds,polarization, swath_only=False itile_samples.append(itile_sample) itile_lines.append(itile_line) if 
variable_names: - if (variable_names[0] is not None): + if variable_names[0] is not None: for variable_name in variable_names: tmp_array = l1b_ds[variable_name].sel(uu) - if 'pol' in tmp_array.coords: + if "pol" in tmp_array.coords: tmp_array = tmp_array.sel(pol=polarization) - if (variable_name == 'macs_Im'): + if variable_name == "macs_Im": values_addon = float(tmp_array.values[ik]) else: values_addon = float(tmp_array.values) variables[variable_name].append(values_addon) - polygons['tiles'] = poly_tiles - polygons['bursts'] = poly_bursts + polygons["tiles"] = poly_tiles + polygons["bursts"] = poly_bursts # # coordinates['ibursts'] = ibursts - coordinates['itile_samples'] = itile_samples - coordinates['itile_lines'] = itile_lines + coordinates["itile_samples"] = itile_samples + coordinates["itile_lines"] = itile_lines return polygons, coordinates, variables -def get_swath_tiles_polygons_from_l1bfile(l1b_file,polarization, ik=0, **kwargs): +def get_swath_tiles_polygons_from_l1bfile(l1b_file, polarization, ik=0, **kwargs): """ - get polygons for all the groups in a L1B SAR IFREMER file - Args: - l1b_file: str full path of L1B product .nc - ik: int [optional], index of wave number selected for variable displayed such as 'cwave' or 'imacs' - Keyword Args: - kwargs (dict): optional keyword arguments : variable_names (list), is valid entry - Returns: + get polygons for all the groups in a L1B SAR IFREMER file + Args: + l1b_file: str full path of L1B product .nc + ik: int [optional], index of wave number selected for variable displayed such as 'cwave' or 'imacs' + Keyword Args: + kwargs (dict): optional keyword arguments : variable_names (list), is valid entry + Returns: - """ + """ swath_only = False # Initialisation of output structures _variables = None polygons = {} coordinates = {} variables = {} - burst_types = ['intra', 'inter'] + burst_types = ["intra", "inter"] # coordinates_varnames = ['ibursts', 'itile_samples', 'itile_lines'] - coordinates_varnames = [ 
'itile_samples', 'itile_lines'] - variable_names = kwargs.get('variable_names', None) + coordinates_varnames = ["itile_samples", "itile_lines"] + variable_names = kwargs.get("variable_names", None) for burst_type in burst_types: # polygons polygons[burst_type] = {} @@ -163,47 +188,59 @@ def get_swath_tiles_polygons_from_l1bfile(l1b_file,polarization, ik=0, **kwargs) for variable_name in variable_names: variables[burst_type][variable_name] = [] - burst_types = ['intra', 'inter'] + burst_types = ["intra", "inter"] for burst_type in burst_types: - l1b_ds = xr.open_dataset(l1b_file, group=burst_type + 'burst') + l1b_ds = xr.open_dataset(l1b_file, group=burst_type + "burst") if variable_names: if variable_names[0] is not None: - _polygons, _coordinates, _variables = get_swath_tiles_polygons_from_l1bgroup(l1b_ds,polarization, - variable_names=variable_names, - ik=ik) + ( + _polygons, + _coordinates, + _variables, + ) = get_swath_tiles_polygons_from_l1bgroup( + l1b_ds, polarization, variable_names=variable_names, ik=ik + ) else: - _polygons, _coordinates, _variables = get_swath_tiles_polygons_from_l1bgroup(l1b_ds,polarization) + ( + _polygons, + _coordinates, + _variables, + ) = get_swath_tiles_polygons_from_l1bgroup(l1b_ds, polarization) # polygons for polygons_varname in polygons_varnames: - polygons[burst_type][polygons_varname] = polygons[burst_type][polygons_varname] + _polygons[ - polygons_varname] + polygons[burst_type][polygons_varname] = ( + polygons[burst_type][polygons_varname] + _polygons[polygons_varname] + ) # coordinates for coordinates_varname in coordinates_varnames: - coordinates[burst_type][coordinates_varname] = coordinates[burst_type][coordinates_varname] + _coordinates[ - coordinates_varname] + coordinates[burst_type][coordinates_varname] = ( + coordinates[burst_type][coordinates_varname] + + _coordinates[coordinates_varname] + ) # variables if variable_names: if variable_names[0] is not None: for variable_name in variable_names: - 
variables[burst_type][variable_name] = variables[burst_type][variable_name] + _variables[ - variable_name] + variables[burst_type][variable_name] = ( + variables[burst_type][variable_name] + _variables[variable_name] + ) return polygons, coordinates, variables -def get_swath_tiles_polygons_from_l1bfiles(l1b_files,polarization, ik=0, **kwargs): +def get_swath_tiles_polygons_from_l1bfiles(l1b_files, polarization, ik=0, **kwargs): """ - get polygons for all the groups in a set of L1B SAR IFREMER files - Args: - l1b_file: str full path of L1B product .nc - polarization: str VV or ... - ik: int [optional], index of wave number selected for variable displayed such as 'cwave' or 'imacs' - Keyword Args: - kwargs (dict): optional keyword arguments : variable_names (list), is valid entry - Returns: + get polygons for all the groups in a set of L1B SAR IFREMER files + Args: + l1b_file: str full path of L1B product .nc + polarization: str VV or ... + ik: int [optional], index of wave number selected for variable displayed such as 'cwave' or 'imacs' + Keyword Args: + kwargs (dict): optional keyword arguments : variable_names (list), is valid entry + Returns: - """ + """ # Initialisation of output structures polygons = {} coordinates = {} @@ -211,10 +248,10 @@ def get_swath_tiles_polygons_from_l1bfiles(l1b_files,polarization, ik=0, **kwarg _polygons = {} _coordinates = None _variables = None - burst_types = ['intra', 'inter'] + burst_types = ["intra", "inter"] # coordinates_varnames = ['ibursts', 'itile_samples', 'itile_lines'] - coordinates_varnames = [ 'itile_samples', 'itile_lines'] - variable_names = kwargs.get('variable_names', None) + coordinates_varnames = ["itile_samples", "itile_lines"] + variable_names = kwargs.get("variable_names", None) for burst_type in burst_types: # polygons polygons[burst_type] = {} @@ -235,27 +272,39 @@ def get_swath_tiles_polygons_from_l1bfiles(l1b_files,polarization, ik=0, **kwarg # Read the file if variable_names: if variable_names[0] is not 
None: - _polygons, _coordinates, _variables = get_swath_tiles_polygons_from_l1bfile(l1b_file,polarization, - variable_names=variable_names, - ik=ik) + ( + _polygons, + _coordinates, + _variables, + ) = get_swath_tiles_polygons_from_l1bfile( + l1b_file, polarization, variable_names=variable_names, ik=ik + ) else: - _polygons, _coordinates, _variables = get_swath_tiles_polygons_from_l1bfile(l1b_file,polarization) + _polygons, _coordinates, _variables = get_swath_tiles_polygons_from_l1bfile( + l1b_file, polarization + ) # Fill the output for each burst_type for burst_type in burst_types: # polygons for polygons_varname in polygons_varnames: - polygons[burst_type][polygons_varname] = polygons[burst_type][polygons_varname] + _polygons[burst_type][ - polygons_varname] + polygons[burst_type][polygons_varname] = ( + polygons[burst_type][polygons_varname] + + _polygons[burst_type][polygons_varname] + ) # coordinates for coordinates_varname in coordinates_varnames: - coordinates[burst_type][coordinates_varname] = coordinates[burst_type][coordinates_varname] + \ - _coordinates[burst_type][coordinates_varname] + coordinates[burst_type][coordinates_varname] = ( + coordinates[burst_type][coordinates_varname] + + _coordinates[burst_type][coordinates_varname] + ) # variables if variable_names: if variable_names[0] is not None: for variable_name in variable_names: - variables[burst_type][variable_name] = variables[burst_type][variable_name] + \ - _variables[burst_type][variable_name] + variables[burst_type][variable_name] = ( + variables[burst_type][variable_name] + + _variables[burst_type][variable_name] + ) return polygons, coordinates, variables diff --git a/slcl1butils/plotting/add_azimuth_cutoff_lines_on_polar_spec_fig.py b/slcl1butils/plotting/add_azimuth_cutoff_lines_on_polar_spec_fig.py index 82411de..3527fe8 100644 --- a/slcl1butils/plotting/add_azimuth_cutoff_lines_on_polar_spec_fig.py +++ b/slcl1butils/plotting/add_azimuth_cutoff_lines_on_polar_spec_fig.py @@ -3,7 +3,9 
@@ May 2022, validated """ import numpy as np -def add_azimuth_cutoff_lines(ax,tra,limit_wl_plot,azc): + + +def add_azimuth_cutoff_lines(ax, tra, limit_wl_plot, azc): """ :param ax: pyplot axis for polar plot @@ -12,17 +14,33 @@ def add_azimuth_cutoff_lines(ax,tra,limit_wl_plot,azc): :param azc: float: azimuth cutoff computed on a cross spectrum [m] :return: """ - R = 2*np.pi/limit_wl_plot - Azc = 2*np.pi/azc - ttt = np.radians(tra-180.) - theta0 = np.arctan(Azc/R) - #theta0bis = np.arctan2(R,Azc) the pi/2 complementary angle - distance = np.sqrt(R**2+Azc**2) - theta1 = (np.pi/2.)-theta0 + R = 2 * np.pi / limit_wl_plot + Azc = 2 * np.pi / azc + ttt = np.radians(tra - 180.0) + theta0 = np.arctan(Azc / R) + # theta0bis = np.arctan2(R,Azc) the pi/2 complementary angle + distance = np.sqrt(R**2 + Azc**2) + theta1 = (np.pi / 2.0) - theta0 theta3 = theta1 + ttt theta4 = -theta1 + ttt - ax.plot([theta3,theta4],[distance,distance],zorder=100000,color='violet',lw=1.5,alpha=0.5,linestyle='--') - theta1 = (np.pi/2.)+theta0 + ax.plot( + [theta3, theta4], + [distance, distance], + zorder=100000, + color="violet", + lw=1.5, + alpha=0.5, + linestyle="--", + ) + theta1 = (np.pi / 2.0) + theta0 theta3 = theta1 + ttt theta4 = -theta1 + ttt - ax.plot([theta3,theta4],[distance,distance],zorder=100000,color='violet',lw=1.5,alpha=0.5,linestyle='--') \ No newline at end of file + ax.plot( + [theta3, theta4], + [distance, distance], + zorder=100000, + color="violet", + lw=1.5, + alpha=0.5, + linestyle="--", + ) diff --git a/slcl1butils/plotting/display_one_spectra.py b/slcl1butils/plotting/display_one_spectra.py index 90534b1..9bcd8a1 100644 --- a/slcl1butils/plotting/display_one_spectra.py +++ b/slcl1butils/plotting/display_one_spectra.py @@ -1,34 +1,105 @@ - -import os import logging -from matplotlib import pyplot as plt -from matplotlib import colors as mcolors -cmap = mcolors.LinearSegmentedColormap.from_list("", ["white","violet","mediumpurple","cyan","springgreen","yellow","red"]) 
+import os + import numpy as np -from slcl1butils import conversion_polar_cartesian -from slcl1butils import spectrum_clockwise_to_trigo +from matplotlib import colors as mcolors +from matplotlib import pyplot as plt +from xsarslc.processing.xspectra import symmetrize_xspectrum + +from slcl1butils import ( + conversion_polar_cartesian, + spectrum_clockwise_to_trigo, + spectrum_rotation, +) + +cmap = mcolors.LinearSegmentedColormap.from_list( + "", ["white", "violet", "mediumpurple", "cyan", "springgreen", "yellow", "red"] +) try: import display_xspectra_grid -except: - pass #TODO fix this dependency keep or let -from slcl1butils import spectrum_rotation -# from slcl1butils.symmetrize_l1b_spectra import symmetrize_xspectrum -from xsarslc.processing.xspectra import symmetrize_xspectrum -#import slcl1butils.plotting.add_azimuth_cutoff_lines_on_polar_spec_fig #wavetoolbox -reference_oswK_1145m_60pts = np.array([0.005235988, 0.00557381, 0.005933429, 0.00631625, 0.00672377, - 0.007157583, 0.007619386, 0.008110984, 0.008634299, 0.009191379, - 0.0097844, 0.01041568, 0.0110877, 0.01180307, 0.01256459, 0.01337525, - 0.01423822, 0.01515686, 0.01613477, 0.01717577, 0.01828394, 0.01946361, - 0.02071939, 0.02205619, 0.02347924, 0.02499411, 0.02660671, 0.02832336, - 0.03015076, 0.03209607, 0.03416689, 0.03637131, 0.03871796, 0.04121602, - 0.04387525, 0.04670605, 0.0497195, 0.05292737, 0.05634221, 0.05997737, - 0.06384707, 0.06796645, 0.0723516, 0.07701967, 0.08198893, 0.08727881, - 0.09290998, 0.09890447, 0.1052857, 0.1120787, 0.1193099, 0.1270077, - 0.1352022, 0.1439253, 0.1532113, 0.1630964, 0.1736193, 0.1848211, - 0.1967456, 0.2094395]) - -def core_plot_part(crossSpectraRePol,heading,fig=None,ax=None,thecmap=None,limit_display_wv=100,title='x spec', - tauval=2,azimuth_cutoffSAR=None,vmin=None,vmax=None,add_colorbar=True): +except ImportError: + pass # TODO fix this dependency keep or let +# import slcl1butils.plotting.add_azimuth_cutoff_lines_on_polar_spec_fig #wavetoolbox 
+reference_oswK_1145m_60pts = np.array( + [ + 0.005235988, + 0.00557381, + 0.005933429, + 0.00631625, + 0.00672377, + 0.007157583, + 0.007619386, + 0.008110984, + 0.008634299, + 0.009191379, + 0.0097844, + 0.01041568, + 0.0110877, + 0.01180307, + 0.01256459, + 0.01337525, + 0.01423822, + 0.01515686, + 0.01613477, + 0.01717577, + 0.01828394, + 0.01946361, + 0.02071939, + 0.02205619, + 0.02347924, + 0.02499411, + 0.02660671, + 0.02832336, + 0.03015076, + 0.03209607, + 0.03416689, + 0.03637131, + 0.03871796, + 0.04121602, + 0.04387525, + 0.04670605, + 0.0497195, + 0.05292737, + 0.05634221, + 0.05997737, + 0.06384707, + 0.06796645, + 0.0723516, + 0.07701967, + 0.08198893, + 0.08727881, + 0.09290998, + 0.09890447, + 0.1052857, + 0.1120787, + 0.1193099, + 0.1270077, + 0.1352022, + 0.1439253, + 0.1532113, + 0.1630964, + 0.1736193, + 0.1848211, + 0.1967456, + 0.2094395, + ] +) + + +def core_plot_part( + crossSpectraRePol, + heading, + fig=None, + ax=None, + thecmap=None, + limit_display_wv=100, + title="x spec", + tauval=2, + azimuth_cutoffSAR=None, + vmin=None, + vmax=None, + add_colorbar=True, +): """ Parameters ---------- @@ -39,15 +110,16 @@ def core_plot_part(crossSpectraRePol,heading,fig=None,ax=None,thecmap=None,limit """ # partie display plot - if fig is None : - plt.figure(figsize=(20,8),dpi=100) - if ax is None : - ax = plt.subplot(1,1,1,polar=True) + if fig is None: + plt.figure(figsize=(20, 8), dpi=100) + if ax is None: + ax = plt.subplot(1, 1, 1, polar=True) ax.set_theta_direction(-1) # theta increasing clockwise ax.set_theta_zero_location("N") - crossSpectraRe_redpol = crossSpectraRePol.where(np.abs(crossSpectraRePol.k) <= 2 * np.pi / limit_display_wv, - drop=True) + crossSpectraRe_redpol = crossSpectraRePol.where( + np.abs(crossSpectraRePol.k) <= 2 * np.pi / limit_display_wv, drop=True + ) # shift_phi = np.radians(heading-90) @@ -55,30 +127,57 @@ def core_plot_part(crossSpectraRePol,heading,fig=None,ax=None,thecmap=None,limit # crossSpectraRe_redpol_phi_new 
= crossSpectraRe_redpol.assign_coords({'phi':(crossSpectraRe_redpol.phi.values+shift_phi)[::-1]}) # crossSpectraRe_redpol_phi_new = crossSpectraRe_redpol_phi_new/45000. - im = crossSpectraRe_redpol_phi_new.plot(cmap=thecmap,alpha=0.8,vmin=vmin,vmax=vmax,add_colorbar=add_colorbar) - #plt.pcolor(crossSpectraRe_redpol_phi_new.phi,crossSpectraRe_redpol_phi_new.k,crossSpectraRe_redpol_phi_new,cmap=thecmap) + im = crossSpectraRe_redpol_phi_new.plot( + cmap=thecmap, alpha=0.8, vmin=vmin, vmax=vmax, add_colorbar=add_colorbar + ) + # plt.pcolor(crossSpectraRe_redpol_phi_new.phi,crossSpectraRe_redpol_phi_new.k,crossSpectraRe_redpol_phi_new,cmap=thecmap) plt.grid(True) - plt.plot(np.radians([heading,heading]),(0.01,max(crossSpectraRe_redpol.k)),'r-') - plt.plot(np.radians([heading + 180,heading + 180]),(0.01,max(crossSpectraRe_redpol.k)),'r-') - plt.plot(np.radians([heading + 90,heading + 90]),(0.01,max(crossSpectraRe_redpol.k)),'r-') - plt.plot(np.radians([heading + 270,heading + 270]),(0.01,max(crossSpectraRe_redpol.k)),'r-') - ax.text(np.radians(heading),(0.65 + (np.cos(np.radians(heading)) < 0) * 0.25) * max(crossSpectraRe_redpol.k), - ' Azimuth',size=18,color='red',rotation=-heading + 90 + (np.sin(np.radians(heading)) < 0) * 180, - ha='center') - ax.text(np.radians(heading + 90),0.80 * max(crossSpectraRe_redpol.k),' Range',size=18,color='red', - rotation=-heading + (np.cos(np.radians(heading)) < 0) * 180,ha='center') - display_xspectra_grid.circle_plot(ax,r=[100,200,400]) + plt.plot(np.radians([heading, heading]), (0.01, max(crossSpectraRe_redpol.k)), "r-") + plt.plot( + np.radians([heading + 180, heading + 180]), + (0.01, max(crossSpectraRe_redpol.k)), + "r-", + ) + plt.plot( + np.radians([heading + 90, heading + 90]), + (0.01, max(crossSpectraRe_redpol.k)), + "r-", + ) + plt.plot( + np.radians([heading + 270, heading + 270]), + (0.01, max(crossSpectraRe_redpol.k)), + "r-", + ) + ax.text( + np.radians(heading), + (0.65 + (np.cos(np.radians(heading)) < 0) * 0.25) + * 
max(crossSpectraRe_redpol.k), + " Azimuth", + size=18, + color="red", + rotation=-heading + 90 + (np.sin(np.radians(heading)) < 0) * 180, + ha="center", + ) + ax.text( + np.radians(heading + 90), + 0.80 * max(crossSpectraRe_redpol.k), + " Range", + size=18, + color="red", + rotation=-heading + (np.cos(np.radians(heading)) < 0) * 180, + ha="center", + ) + display_xspectra_grid.circle_plot(ax, r=[100, 200, 400]) if azimuth_cutoffSAR: - add_azimuth_cutoff_lines_on_polar_spec_fig.add_azimuth_cutoff_lines(ax=ax, - tra=heading, - limit_wl_plot=limit_display_wv, - azc=azimuth_cutoffSAR) + add_azimuth_cutoff_lines_on_polar_spec_fig.add_azimuth_cutoff_lines( + ax=ax, tra=heading, limit_wl_plot=limit_display_wv, azc=azimuth_cutoffSAR + ) ax.set_rmax(2.0 * np.pi / limit_display_wv) - ax.set_rmin(0) #security!! - if title is None : - plt.title('tau : %s' % tauval,fontsize=18) - else : - plt.title(title,fontsize=18) + ax.set_rmin(0) # security!! + if title is None: + plt.title("tau : %s" % tauval, fontsize=18) + else: + plt.title(title, fontsize=18) return im @@ -97,29 +196,40 @@ def add_polar_direction_lines(deg_step=30): # maxx = 0.06 offset_angle = -90 - for yy,theta in enumerate(np.arange(0,360,deg_step)): - x1,y1 = pol2cart(maxx,np.radians(theta)) - x2,y2 = pol2cart(maxx,np.radians((theta+180)%360)) - x3,y3 = pol2cart(0.06,np.radians((theta))) - x = np.array([x1,x2]) - y = np.array([y1,y2]) - x = np.array([x2,x1]) - y = np.array([y2,y1]) - - plt.plot(x,y,c='grey',alpha=0.3,lw=1) - #rotn = np.degrees(np.arctan2(y[1:]-y[:-1], x[1:]-x[:-1]))[0] - rotn = theta #label - rotn = (offset_angle+rotn)%360 - #if rotn>0 and rotn<300: - if abs(rotn)>90: #on retourne le label si ca depasse 90 ou -90 - rotn = (rotn+180)%180 - plt.annotate("%i°"%np.rint(rotn), xy=(x3, y3), rotation=rotn,fontsize=6,color='grey') + for yy, theta in enumerate(np.arange(0, 360, deg_step)): + x1, y1 = pol2cart(maxx, np.radians(theta)) + x2, y2 = pol2cart(maxx, np.radians((theta + 180) % 360)) + x3, y3 = 
pol2cart(0.06, np.radians((theta))) + x = np.array([x2, x1]) + y = np.array([y2, y1]) + + plt.plot(x, y, c="grey", alpha=0.3, lw=1) + # rotn = np.degrees(np.arctan2(y[1:]-y[:-1], x[1:]-x[:-1]))[0] + rotn = theta # label + rotn = (offset_angle + rotn) % 360 + # if rotn>0 and rotn<300: + if abs(rotn) > 90: # on retourne le label si ca depasse 90 ou -90 + rotn = (rotn + 180) % 180 + plt.annotate( + "%i°" % np.rint(rotn), xy=(x3, y3), rotation=rotn, fontsize=6, color="grey" + ) # if yy==20: # break -def display_polar_spectra(allspecs,heading,part='Re',limit_display_wv=100,title=None,outputfile=None, - fig=None,ax=None,interactive=False,tau_id=2,varname='cross-spectrum_%stau'): +def display_polar_spectra( + allspecs, + heading, + part="Re", + limit_display_wv=100, + title=None, + outputfile=None, + fig=None, + ax=None, + interactive=False, + tau_id=2, + varname="cross-spectrum_%stau", +): """ :param allspecs: @@ -135,23 +245,26 @@ def display_polar_spectra(allspecs,heading,part='Re',limit_display_wv=100,title= :return: """ - for tauval in [tau_id] : - if part == 'Im': - thecmap = 'PuOr' - #if isinstance(allspecs,xarray.DataArray): - if '2tau' not in allspecs.dims: + for tauval in [tau_id]: + if part == "Im": + thecmap = "PuOr" + # if isinstance(allspecs,xarray.DataArray): + if "2tau" not in allspecs.dims: coS = allspecs else: - if allspecs[varname % tauval].dtype in [np.complex_, np.complex]: - coS = allspecs[varname % tauval].mean( - dim='%stau' % tauval).imag # co spectrum = imag part of cross-spectrum + if allspecs[varname % tauval].dtype in [np.complex_, np.complex]: + coS = ( + allspecs[varname % tauval].mean(dim="%stau" % tauval).imag + ) # co spectrum = imag part of cross-spectrum else: - coS = allspecs[varname % tauval].mean( - dim='%stau' % tauval) # co spectrum = imag part of cross-spectrum + coS = allspecs[varname % tauval].mean( + dim="%stau" % tauval + ) # co spectrum = imag part of cross-spectrum else: thecmap = cmap - coS = abs(allspecs[varname % 
tauval].mean( - dim='%stau' % tauval).real) # co spectrum = real part of cross-spectrum + coS = abs( + allspecs[varname % tauval].mean(dim="%stau" % tauval).real + ) # co spectrum = real part of cross-spectrum # new_spec_Polar = xsarsea.conversion_polar_cartesian.from_xCartesianSpectrum(coS,Nphi=72, # ksampling='log', # **{'Nk' : 60, @@ -161,40 +274,62 @@ def display_polar_spectra(allspecs,heading,part='Re',limit_display_wv=100,title= # 'kmax' : # reference_oswK_1145m_60pts[ # -1]}) - new_spec_Polar = conversion_polar_cartesian.from_xCartesianSpectrum(coS,Nphi=72, - ksampling='log', - **{'k' : reference_oswK_1145m_60pts}) + new_spec_Polar = conversion_polar_cartesian.from_xCartesianSpectrum( + coS, Nphi=72, ksampling="log", **{"k": reference_oswK_1145m_60pts} + ) # new_spec_Polar = xsarsea.conversion_polar_cartesian.from_xCartesianSpectrum(coS,Nphi=72, # ksampling='log', # **{'k' : reference_oswK_1145m_60pts}) - #new_spec_Polar.assign_coords({'k':reference_oswK_1145m_60pts}) # test agrouaze + # new_spec_Polar.assign_coords({'k':reference_oswK_1145m_60pts}) # test agrouaze crossSpectraRePol = new_spec_Polar.squeeze() - crossSpectraRePol = spectrum_clockwise_to_trigo.apply_clockwise_to_trigo(crossSpectraRePol) - #crossSpectraRePol = spectrum_rotation.apply_rotation(crossSpectraRePol,-90.) # This is for having origin at North # 1dec21 change +90 to -90 on a case descending - crossSpectraRePol = spectrum_rotation.apply_rotation(crossSpectraRePol, -270.) # sept 22: it seems consistent with what we see in the cart spectra, while -90 is opposite direction - crossSpectraRePol = spectrum_rotation.apply_rotation(crossSpectraRePol,heading) - if part == 'Re': + crossSpectraRePol = spectrum_clockwise_to_trigo.apply_clockwise_to_trigo( + crossSpectraRePol + ) + # crossSpectraRePol = spectrum_rotation.apply_rotation(crossSpectraRePol,-90.) 
# This is for having origin at North # 1dec21 change +90 to -90 on a case descending + crossSpectraRePol = spectrum_rotation.apply_rotation( + crossSpectraRePol, -270.0 + ) # sept 22: it seems consistent with what we see in the cart spectra, while -90 is opposite direction + crossSpectraRePol = spectrum_rotation.apply_rotation(crossSpectraRePol, heading) + if part == "Re": crossSpectraRePol = abs(crossSpectraRePol) - im = core_plot_part(crossSpectraRePol,heading,fig=fig,ax=ax,thecmap=thecmap, - limit_display_wv=limit_display_wv,title=title, - tauval=tauval) + im = core_plot_part( + crossSpectraRePol, + heading, + fig=fig, + ax=ax, + thecmap=thecmap, + limit_display_wv=limit_display_wv, + title=title, + tauval=tauval, + ) if outputfile: plt.savefig(outputfile) - logging.info('outputfile : %s',outputfile) + logging.info("outputfile : %s", outputfile) else: if interactive: pass - #plt.show() + # plt.show() return im - - - -def plot_a_single_xspec_cart_L1B_IW(ds,polarization,tile_line_i,tile_sample_i,title,fig,cat_xspec='intra',part='Re', - rotation=False,orbit_pass=None,platform_heading=None,dpi=100,figsize=(8,6), - outputfile=None,skip_symmetrize=False): +def plot_a_single_xspec_cart_l1b_iw( + ds, + polarization, + tile_line_i, + tile_sample_i, + title, + fig, + cat_xspec="intra", + part="Re", + rotation=False, + orbit_pass=None, + platform_heading=None, + dpi=100, + figsize=(8, 6), + outputfile=None, + skip_symmetrize=False, +): """ Parameters @@ -214,28 +349,35 @@ def plot_a_single_xspec_cart_L1B_IW(ds,polarization,tile_line_i,tile_sample_i,ti """ import matplotlib - from scipy import ndimage, misc + from scipy import ndimage + nb_lines = 1 nb_burst = 1 nb_sample = 1 cpt_spec = 1 limit_display_wv = 50 - circles_wavelength = [50,100,200,300,400,500] + circles_wavelength = [50, 100, 200, 300, 400, 500] tau_id = 2 add_colorbar = True - #fig = plt.figure() - if cat_xspec == 'inter': - #varname = cat_xspec+'burst_xspectra_'+part - varname = cat_xspec + 'burst_xspectra' 
+ # fig = plt.figure() + if cat_xspec == "inter": + # varname = cat_xspec+'burst_xspectra_'+part + varname = cat_xspec + "burst_xspectra" else: - #varname = 'xspectra_%stau_'+part - varname = 'xspectra_%stau'%tau_id # it is mandatory to recombined Re+1j*Im dans the L1B - set_xspec = ds[{'tile_line':tile_line_i,'tile_sample':tile_sample_i}].sel({'pol':polarization})[varname] - if 'k_az' in set_xspec.dims or skip_symmetrize is True: - print('symmetrize and swap_dims already done') + # varname = 'xspectra_%stau_'+part + varname = ( + "xspectra_%stau" % tau_id + ) # it is mandatory to recombined Re+1j*Im dans the L1B + set_xspec = ds[{"tile_line": tile_line_i, "tile_sample": tile_sample_i}].sel( + {"pol": polarization} + )[varname] + if "k_az" in set_xspec.dims or skip_symmetrize is True: + print("symmetrize and swap_dims already done") else: - set_xspec = set_xspec.swap_dims({'freq_line': 'k_az', 'freq_sample': 'k_rg'}) - set_xspec = symmetrize_xspectrum(set_xspec, dim_range='k_rg', dim_azimuth='k_az') # + set_xspec = set_xspec.swap_dims({"freq_line": "k_az", "freq_sample": "k_rg"}) + set_xspec = symmetrize_xspectrum( + set_xspec, dim_range="k_rg", dim_azimuth="k_az" + ) # # scales = (0, 5, 0, 5) # t = Affine2D().rotate_deg(25) @@ -244,42 +386,70 @@ def plot_a_single_xspec_cart_L1B_IW(ds,polarization,tile_line_i,tile_sample_i,ti # h = floating_axes.GridHelperCurveLinear(t, scales) # ax = floating_axes.FloatingSubplot(fig, 111, grid_helper=h) - plt.figure(dpi=dpi,figsize=figsize) - #fig.add_subplot(ax) + plt.figure(dpi=dpi, figsize=figsize) + # fig.add_subplot(ax) ax = plt.subplot(nb_lines * nb_burst, nb_sample, cpt_spec) - im = display_cartesian_spectra(set_xspec,part=part,cat_xspec=cat_xspec,limit_display_wv=limit_display_wv, - title=title,outputfile=outputfile, - fig=fig,ax=ax,interactive=False,tau_id=tau_id,varname=varname,rolling=True,title_fontsize=8, - circles_wavelength=circles_wavelength,add_colorbar=add_colorbar,kx_varname='k_rg',ky_varname='k_az',dpi=dpi) # 
- if orbit_pass == 'Descending': + _ = display_cartesian_spectra( + set_xspec, + part=part, + cat_xspec=cat_xspec, + limit_display_wv=limit_display_wv, + title=title, + outputfile=outputfile, + fig=fig, + ax=ax, + interactive=False, + tau_id=tau_id, + varname=varname, + rolling=True, + title_fontsize=8, + circles_wavelength=circles_wavelength, + add_colorbar=add_colorbar, + kx_varname="k_rg", + ky_varname="k_az", + dpi=dpi, + ) # + if orbit_pass == "Descending": ax.invert_xaxis() ax.invert_yaxis() - plt.axis('scaled') # to make the circle looks like circles + plt.axis("scaled") # to make the circle looks like circles if rotation: - #if True: # I need to save image on disk to rotate it - outf = '/tmp/spectra_cart_iw_tiff_one_tile.png' + # if True: # I need to save image on disk to rotate it + outf = "/tmp/spectra_cart_iw_tiff_one_tile.png" plt.savefig(outf) img = matplotlib.pyplot.imread(outf) - if platform_heading<-45: + if platform_heading < -45: rot_ang = -(180 + platform_heading) else: rot_ang = platform_heading - #plt.figure() + # plt.figure() img_45 = ndimage.rotate(img, rot_ang, reshape=True) plt.figure() plt.imshow(img_45, cmap=plt.cm.gray) - #plt.axvline(x=0.5) - plt.quiver(50,200,0,60,color='k',scale=500,headwidth=10,headlength=12) - plt.text(52,150,'North') - plt.axis('off') + # plt.axvline(x=0.5) + plt.quiver(50, 200, 0, 60, color="k", scale=500, headwidth=10, headlength=12) + plt.text(52, 150, "North") + plt.axis("off") plt.show() return set_xspec -def plot_a_single_xspec_cart_L1B_WV(ds,title,fig=None,ax=None,part='Re' - ,orbit_pass=None,platform_heading=None,dpi=100,figsize=(8,6), - outputfile=None,limit_display_wv = 50, - circles_wavelength = [50, 100, 200, 300, 400, 500],vmax=None): + +def plot_a_single_xspec_cart_L1B_WV( + ds, + title, + fig=None, + ax=None, + part="Re", + orbit_pass=None, + platform_heading=None, + dpi=100, + figsize=(8, 6), + outputfile=None, + limit_display_wv=50, + circles_wavelength=[50, 100, 200, 300, 400, 500], + vmax=None, 
+): """ :param ds: @@ -296,24 +466,27 @@ def plot_a_single_xspec_cart_L1B_WV(ds,title,fig=None,ax=None,part='Re' :return: """ - tau_id = 2 add_colorbar = True # fig = plt.figure() - varname = 'xspectra_%stau' # it is mandatory to recombined Re+1j*Im dans the L1B + varname = "xspectra_%stau" # it is mandatory to recombined Re+1j*Im dans the L1B if varname not in ds: - logging.info('conjugate Re+Im') + logging.info("conjugate Re+Im") for tautau in range(3): - ds['xspectra_%stau' % tautau] = ds['xspectra_%stau_Re' % tautau] + 1j * ds['xspectra_%stau_Im' % tautau] - ds = ds.drop(['xspectra_%stau_Re' % tautau, 'xspectra_%stau_Im' % tautau]) + ds["xspectra_%stau" % tautau] = ( + ds["xspectra_%stau_Re" % tautau] + 1j * ds["xspectra_%stau_Im" % tautau] + ) + ds = ds.drop(["xspectra_%stau_Re" % tautau, "xspectra_%stau_Im" % tautau]) - set_xspec = ds[varname%tau_id] - if 'k_az' in set_xspec.dims: + set_xspec = ds[varname % tau_id] + if "k_az" in set_xspec.dims: pass else: - set_xspec = set_xspec.swap_dims({'freq_line': 'k_az', 'freq_sample': 'k_rg'}) - set_xspec = symmetrize_xspectrum(set_xspec, dim_range='k_rg', dim_azimuth='k_az') - ds[varname%tau_id] = set_xspec + set_xspec = set_xspec.swap_dims({"freq_line": "k_az", "freq_sample": "k_rg"}) + set_xspec = symmetrize_xspectrum( + set_xspec, dim_range="k_rg", dim_azimuth="k_az" + ) + ds[varname % tau_id] = set_xspec # scales = (0, 5, 0, 5) # t = Affine2D().rotate_deg(25) @@ -325,25 +498,55 @@ def plot_a_single_xspec_cart_L1B_WV(ds,title,fig=None,ax=None,part='Re' # fig.add_subplot(ax) if ax is None: ax = plt.subplot(111) - im = display_cartesian_spectra(ds, part=part, cat_xspec='intra', limit_display_wv=limit_display_wv, - title=title, outputfile=outputfile, - fig=fig, ax=ax, interactive=False, tau_id=tau_id, varname=varname, rolling=True, - title_fontsize=8, - circles_wavelength=circles_wavelength, add_colorbar=add_colorbar, kx_varname='k_rg', - ky_varname='k_az', dpi=dpi,vmax=vmax) # - if orbit_pass == 'Descending': + _ = 
display_cartesian_spectra( + ds, + part=part, + cat_xspec="intra", + limit_display_wv=limit_display_wv, + title=title, + outputfile=outputfile, + fig=fig, + ax=ax, + interactive=False, + tau_id=tau_id, + varname=varname, + rolling=True, + title_fontsize=8, + circles_wavelength=circles_wavelength, + add_colorbar=add_colorbar, + kx_varname="k_rg", + ky_varname="k_az", + dpi=dpi, + vmax=vmax, + ) # + if orbit_pass == "Descending": ax.invert_xaxis() ax.invert_yaxis() - plt.axis('scaled') # to make the circle looks like circles + plt.axis("scaled") # to make the circle looks like circles return set_xspec - - -def display_cartesian_spectra(allspecs,part='Re',cat_xspec='intra',limit_display_wv=100,title=None,outputfile=None, - fig=None,ax=None,interactive=False,tau_id=2,varname='cross-spectrum_%stau',title_fontsize=12, - kx_varname='kx',ky_varname='ky',rolling=False,dpi=100,circles_wavelength=[50,100,200,400], - add_colorbar=True,vmax=None): +def display_cartesian_spectra( + allspecs, + part="Re", + cat_xspec="intra", + limit_display_wv=100, + title=None, + outputfile=None, + fig=None, + ax=None, + interactive=False, + tau_id=2, + varname="cross-spectrum_%stau", + title_fontsize=12, + kx_varname="kx", + ky_varname="ky", + rolling=False, + dpi=100, + circles_wavelength=[50, 100, 200, 400], + add_colorbar=True, + vmax=None, +): """ Parameters @@ -373,24 +576,27 @@ def display_cartesian_spectra(allspecs,part='Re',cat_xspec='intra',limit_display # for tauval in [tau_id] : tauval = tau_id - if cat_xspec == 'intra': + if cat_xspec == "intra": # varname = varname% tauval - if part == 'Im': - thecmap = 'PuOr' - if allspecs.dtype in [np.complex_, np.complex,np.complex64]: + if part == "Im": + thecmap = "PuOr" + if allspecs.dtype in [np.complex_, np.complex, np.complex64]: coS = allspecs.mean( - dim='%stau' % tauval).imag # co spectrum = imag part of cross-spectrum + dim="%stau" % tauval + ).imag # co spectrum = imag part of cross-spectrum else: coS = allspecs.mean( - dim='%stau' % 
tauval) # co spectrum = imag part of cross-spectrum + dim="%stau" % tauval + ) # co spectrum = imag part of cross-spectrum else: thecmap = cmap - coS = abs(allspecs.mean( - dim='%stau' % tauval).real) # co spectrum = real part of cross-spectrum + coS = abs( + allspecs.mean(dim="%stau" % tauval).real + ) # co spectrum = real part of cross-spectrum - else:# inter burst case (without tau dimension) - if part == 'Im': - thecmap = 'PuOr' + else: # inter burst case (without tau dimension) + if part == "Im": + thecmap = "PuOr" if allspecs.dtype in [np.complex_, np.complex]: coS = allspecs.imag # co spectrum = imag part of cross-spectrum else: @@ -399,10 +605,19 @@ def display_cartesian_spectra(allspecs,part='Re',cat_xspec='intra',limit_display thecmap = cmap coS = abs(allspecs) crossSpectraRe_red = coS.where( - np.logical_and(np.abs(coS[kx_varname]) <= 0.14,np.abs(coS[ky_varname]) <= 0.14),drop=True) - #crossSpectraRe_red = crossSpectraRe_red.rolling(kx=3,center=True).mean().rolling(ky=3,center=True).mean() + np.logical_and( + np.abs(coS[kx_varname]) <= 0.14, np.abs(coS[ky_varname]) <= 0.14 + ), + drop=True, + ) + # crossSpectraRe_red = crossSpectraRe_red.rolling(kx=3,center=True).mean().rolling(ky=3,center=True).mean() if rolling: - crossSpectraRe_red = crossSpectraRe_red.rolling({kx_varname:3}, center=True).mean().rolling({ky_varname:3}, center=True).mean() + crossSpectraRe_red = ( + crossSpectraRe_red.rolling({kx_varname: 3}, center=True) + .mean() + .rolling({ky_varname: 3}, center=True) + .mean() + ) # filter high wavelength (for test) # crossSpectraRe_red = crossSpectraRe_red.where( @@ -414,59 +629,71 @@ def display_cartesian_spectra(allspecs,part='Re',cat_xspec='intra',limit_display # crossSpectraRe_red = filter_cartesian_with_wavelength_ring(lower_wl, higher_wl, crossSpectraRe_red) # partie display plot if fig is None: - plt.figure(figsize=(10,8),dpi=dpi) + plt.figure(figsize=(10, 8), dpi=dpi) if ax is None: - - ax = plt.subplot(1,1,1,polar=False) + ax = 
plt.subplot(1, 1, 1, polar=False) # add azimuth / range red axis # plt.axhline(y=0) - plt.plot([0, 0], [-2.*np.pi/limit_display_wv, 2.*np.pi/limit_display_wv], 'c--',alpha=0.7,) - plt.text(-0.001,0.05,'azimuth',color='c',rotation=90, va='center',fontsize=7) - plt.plot([-2.*np.pi/limit_display_wv, 2.*np.pi/limit_display_wv], [0, 0], 'r--',alpha=0.7) - plt.text(0.05, 0.004, 'range', color='r', rotation=0, va='center',fontsize=7) + plt.plot( + [0, 0], + [-2.0 * np.pi / limit_display_wv, 2.0 * np.pi / limit_display_wv], + "c--", + alpha=0.7, + ) + plt.text(-0.001, 0.05, "azimuth", color="c", rotation=90, va="center", fontsize=7) + plt.plot( + [-2.0 * np.pi / limit_display_wv, 2.0 * np.pi / limit_display_wv], + [0, 0], + "r--", + alpha=0.7, + ) + plt.text(0.05, 0.004, "range", color="r", rotation=0, va="center", fontsize=7) crossSpectraRe_red = crossSpectraRe_red.transpose(ky_varname, kx_varname) - im = crossSpectraRe_red.plot(cmap=thecmap,alpha=0.8,add_colorbar=add_colorbar,vmax=vmax) + im = crossSpectraRe_red.plot( + cmap=thecmap, alpha=0.8, add_colorbar=add_colorbar, vmax=vmax + ) add_cartesian_wavelength_circles(default_values=circles_wavelength) - - plt.legend(loc=1) plt.grid(True) - plt.xlim(-2.0*np.pi/limit_display_wv,2.0*np.pi/limit_display_wv) - plt.ylim(-2.0*np.pi/limit_display_wv,2.0*np.pi/limit_display_wv) - #display_xspectra_grid.circle_plot(ax,r=[300,400,500,600]) - #ax.set_rmax(2.0*np.pi/limit_display_wv) + plt.xlim(-2.0 * np.pi / limit_display_wv, 2.0 * np.pi / limit_display_wv) + plt.ylim(-2.0 * np.pi / limit_display_wv, 2.0 * np.pi / limit_display_wv) + # display_xspectra_grid.circle_plot(ax,r=[300,400,500,600]) + # ax.set_rmax(2.0*np.pi/limit_display_wv) if title is None: - plt.title('tau : %s' % tauval,fontsize=title_fontsize) + plt.title("tau : %s" % tauval, fontsize=title_fontsize) else: - plt.title(title,fontsize=title_fontsize) + plt.title(title, fontsize=title_fontsize) if outputfile: - plt.axis('scaled') # to make the circle looks like circles 
+ plt.axis("scaled") # to make the circle looks like circles filename, file_extension = os.path.splitext(outputfile) - plt.savefig(outputfile,format=file_extension.replace('.','')) - logging.info('outputfile : %s',outputfile) + plt.savefig(outputfile, format=file_extension.replace(".", "")) + logging.info("outputfile : %s", outputfile) else: if interactive: - #plt.show() + # plt.show() pass return im + + def cart2pol(x, y): - rho = np.sqrt(x ** 2 + y ** 2) + rho = np.sqrt(x**2 + y**2) phi = np.arctan2(y, x) return (rho, phi) + def pol2cart(rho, phi): x = rho * np.cos(phi) y = rho * np.sin(phi) return (x, y) -def add_cartesian_wavelength_circles(default_values=[100,300,600]): +def add_cartesian_wavelength_circles(default_values=[100, 300, 600]): N_pts = 100 phi = np.linspace(0, 2 * np.pi, N_pts) for rru in default_values: r100 = np.ones(N_pts) * 2 * np.pi / rru - #r300 = np.ones(N_pts) * 2 * np.pi / 300 + # r300 = np.ones(N_pts) * 2 * np.pi / 300 coords_cart_100 = [] for uu in range(N_pts): @@ -478,5 +705,4 @@ def add_cartesian_wavelength_circles(default_values=[100,300,600]): # coords_cart_300.append(pol2cart(r300[uu], phi[uu])) # coords_cart_300 = np.array(coords_cart_300) # plt.plot(coords_cart_300[:, 0], coords_cart_300[:, 1], label='300 m') - plt.plot(coords_cart_100[:, 0], coords_cart_100[:, 1],'--', label='%s m'%rru) - + plt.plot(coords_cart_100[:, 0], coords_cart_100[:, 1], "--", label="%s m" % rru) diff --git a/slcl1butils/plotting/display_xspectra_grid.py b/slcl1butils/plotting/display_xspectra_grid.py index aaa6a4c..06fc1a6 100644 --- a/slcl1butils/plotting/display_xspectra_grid.py +++ b/slcl1butils/plotting/display_xspectra_grid.py @@ -1,14 +1,19 @@ -from matplotlib import pyplot as plt -import os -import numpy as np import logging +import os import sys + +import numpy as np from matplotlib import colors as mcolors -from slcl1butils.conversion_polar_cartesian import from_xCartesianSpectrum -from slcl1butils import spectrum_clockwise_to_trigo -from 
slcl1butils import spectrum_rotation +from matplotlib import pyplot as plt from spectra_plot_circles_wavenumbers import circle_plot -def draw_signal_and_grid(slc,splitting_image,one_tiff,variable_displayed='digital_number'): + +from slcl1butils import spectrum_clockwise_to_trigo, spectrum_rotation +from slcl1butils.conversion_polar_cartesian import from_xCartesianSpectrum + + +def draw_signal_and_grid( + slc, splitting_image, one_tiff, variable_displayed="digital_number" +): """ :param slc: xarray Dataset with digital number from SLC S1 prod @@ -17,252 +22,352 @@ def draw_signal_and_grid(slc,splitting_image,one_tiff,variable_displayed='digita :return: """ - plt.figure(figsize=(15,10),dpi=100) - plt.title(os.path.basename(one_tiff),fontsize=20) - + plt.figure(figsize=(15, 10), dpi=100) + plt.title(os.path.basename(one_tiff), fontsize=20) - - if variable_displayed=='digital_number': - sli = slice(None,None,5) - vals = slc['digital_number'].values.squeeze()[sli,sli] - azi_coords = slc['azimuth'].values[sli] - range_coords = slc['range'].values[sli] - print(vals.shape,type(vals),vals.dtype) - vmin = np.percentile(vals.real.ravel(),15) - vmax = np.percentile(vals.real,85) - print(vmin,vmax) - print(vals.real.max(),vals.real.min()) - levels = np.linspace(vmin,vmax,40) - plt.contourf(range_coords,azi_coords,vals.real,levels,cmap='Greys_r') # ,vmin=-1500,vmax=1500 + if variable_displayed == "digital_number": + sli = slice(None, None, 5) + vals = slc["digital_number"].values.squeeze()[sli, sli] + azi_coords = slc["azimuth"].values[sli] + range_coords = slc["range"].values[sli] + print(vals.shape, type(vals), vals.dtype) + vmin = np.percentile(vals.real.ravel(), 15) + vmax = np.percentile(vals.real, 85) + print(vmin, vmax) + print(vals.real.max(), vals.real.min()) + levels = np.linspace(vmin, vmax, 40) + plt.contourf( + range_coords, azi_coords, vals.real, levels, cmap="Greys_r" + ) # ,vmin=-1500,vmax=1500 else: step = 10 - 
sys.path.append('/home1/datahome/agrouaze/git/cerbere') - sys.path.append('/home1/datahome/agrouaze/git/sar/') + sys.path.append("/home1/datahome/agrouaze/git/cerbere") + sys.path.append("/home1/datahome/agrouaze/git/sar/") from cerbere.mapper.safegeotifffile import SAFEGeoTiffFile from sar.data.sarimage import SARImage + imagefile = SAFEGeoTiffFile(url=one_tiff) surcouche = SARImage(imagefile) - #pixelsspacing = surcouche.meters2pixels(1000) #1km grid spacing - #lons = surcouche.get_data('lon',spacing=pixelsspacing) - sigma0cerb = surcouche.get_data('sigma0') - print('coords atrack',slc['atrack'].values) - logging.info('expected shape (xsar): %s',slc['sigma0'].shape) - logging.info('current shape(cerbre) : %s ',sigma0cerb.shape) - #sigma0 = slc['sigma0'].values.squeeze()[: :step,: :step] - diffatrack = slc['sigma0'].values.shape[1]-sigma0cerb.shape[0] - diffxtrack = slc['sigma0'].values.shape[2]-sigma0cerb.shape[1] - logging.info('padding vals: %s %s',diffatrack,diffxtrack) - if diffatrack<0 and diffxtrack>0: - sigma0cerb_pad = np.pad(sigma0cerb[0:diffatrack,:],((0,0),(0,diffxtrack))) - elif diffatrack>0 and diffxtrack<0 : - sigma0cerb_pad = np.pad(sigma0cerb[:,0:diffxtrack],((0,diffatrack),(0,0))) + # pixelsspacing = surcouche.meters2pixels(1000) #1km grid spacing + # lons = surcouche.get_data('lon',spacing=pixelsspacing) + sigma0cerb = surcouche.get_data("sigma0") + print("coords atrack", slc["atrack"].values) + logging.info("expected shape (xsar): %s", slc["sigma0"].shape) + logging.info("current shape(cerbre) : %s ", sigma0cerb.shape) + # sigma0 = slc['sigma0'].values.squeeze()[: :step,: :step] + diffatrack = slc["sigma0"].values.shape[1] - sigma0cerb.shape[0] + diffxtrack = slc["sigma0"].values.shape[2] - sigma0cerb.shape[1] + logging.info("padding vals: %s %s", diffatrack, diffxtrack) + if diffatrack < 0 and diffxtrack > 0: + sigma0cerb_pad = np.pad( + sigma0cerb[0:diffatrack, :], ((0, 0), (0, diffxtrack)) + ) + elif diffatrack > 0 and diffxtrack < 0: + 
sigma0cerb_pad = np.pad( + sigma0cerb[:, 0:diffxtrack], ((0, diffatrack), (0, 0)) + ) else: - sigma0cerb_pad = np.pad(sigma0cerb,((0,diffatrack),(0,diffxtrack))) - sigma0cerb_pad = sigma0cerb_pad.reshape((1,sigma0cerb_pad.shape[0],sigma0cerb_pad.shape[1])) - logging.info('sigma0cerb_pad %s mean = %s',sigma0cerb_pad.shape,np.nanmean(sigma0cerb_pad)) - slc['sigma0'].values = sigma0cerb_pad #on remplace par cerbere (temporairement) + sigma0cerb_pad = np.pad(sigma0cerb, ((0, diffatrack), (0, diffxtrack))) + sigma0cerb_pad = sigma0cerb_pad.reshape( + (1, sigma0cerb_pad.shape[0], sigma0cerb_pad.shape[1]) + ) + logging.info( + "sigma0cerb_pad %s mean = %s", + sigma0cerb_pad.shape, + np.nanmean(sigma0cerb_pad), + ) + slc[ + "sigma0" + ].values = sigma0cerb_pad # on remplace par cerbere (temporairement) # - sigma0 = slc['sigma0'].values.squeeze()[: :step,: :step] - sigma0[sigma0==0] = np.nan - #sigma0 = 10. * np.log10(sigma0) + sigma0 = slc["sigma0"].values.squeeze()[::step, ::step] + sigma0[sigma0 == 0] = np.nan + # sigma0 = 10. 
* np.log10(sigma0) maskfinite = np.isfinite(sigma0) - vmin = np.percentile(sigma0[maskfinite].ravel(),1) - vmax = np.percentile(sigma0[maskfinite].ravel(),99) - #vmin= .1 - #vmax = .15 - #vmin=-20 - #vmax = 0 - print('vmin',vmin,'vmax,',vmax) - levels = np.linspace(vmin,vmax,40) - #test with sigma0 NICE display + vmin = np.percentile(sigma0[maskfinite].ravel(), 1) + vmax = np.percentile(sigma0[maskfinite].ravel(), 99) + # vmin= .1 + # vmax = .15 + # vmin=-20 + # vmax = 0 + print("vmin", vmin, "vmax,", vmax) + levels = np.linspace(vmin, vmax, 40) + # test with sigma0 NICE display - incidence = slc['incidence'].values[: :step,: :step] - polarisation = slc.attrs['pols'] #expect VV - logging.info('incidence = %s pol : %s',incidence.shape,polarisation) - #roughness = compute_roughness(sigma0,incidence,polarisation) + incidence = slc["incidence"].values[::step, ::step] + polarisation = slc.attrs["pols"] # expect VV + logging.info("incidence = %s pol : %s", incidence.shape, polarisation) + # roughness = compute_roughness(sigma0,incidence,polarisation) roughness = sigma0 - logging.info('roughness : %s nb NaN : %s',roughness.shape,np.isnan(roughness).sum()) - logging.info('mea roughness : %s std %s',np.nanmean(roughness),np.nanstd(roughness)) - plt.contourf(slc['xtrack'].values[::step],slc['atrack'].values[::step],roughness,levels,cmap='Greys_r') + logging.info( + "roughness : %s nb NaN : %s", roughness.shape, np.isnan(roughness).sum() + ) + logging.info( + "mea roughness : %s std %s", np.nanmean(roughness), np.nanstd(roughness) + ) + plt.contourf( + slc["xtrack"].values[::step], + slc["atrack"].values[::step], + roughness, + levels, + cmap="Greys_r", + ) cb = plt.colorbar() cb.set_label(variable_displayed) - for lab in splitting_image : + for lab in splitting_image: rect = splitting_image[lab] - subset = slc['digital_number'].isel(azimuth=rect['azimuth'],range=rect['range']) - X,Y = np.meshgrid(subset.range,subset.azimuth) + subset = slc["digital_number"].isel( + 
azimuth=rect["azimuth"], range=rect["range"] + ) + X, Y = np.meshgrid(subset.range, subset.azimuth) # plt.plot(Y.ravel()[::10],X.ravel()[::10],'.',label=lab) # get corners range_positions_corners_ind = np.array( - [rect['range'].start,rect['range'].start,rect['range'].stop,rect['range'].stop,rect['range'].start]) - range_positions_corners = slc['range'].values[range_positions_corners_ind] - #print('range_positions_corners',range_positions_corners) + [ + rect["range"].start, + rect["range"].start, + rect["range"].stop, + rect["range"].stop, + rect["range"].start, + ] + ) + range_positions_corners = slc["range"].values[range_positions_corners_ind] + # print('range_positions_corners',range_positions_corners) azimuth_positions_corners_ind = np.array( - [rect['azimuth'].start,rect['azimuth'].stop,rect['azimuth'].stop,rect['azimuth'].start, - rect['azimuth'].start]) - azimuth_positions_corners = slc['azimuth'].values[azimuth_positions_corners_ind] - plt.plot(range_positions_corners,azimuth_positions_corners,'.-',label=lab,lw=5) + [ + rect["azimuth"].start, + rect["azimuth"].stop, + rect["azimuth"].stop, + rect["azimuth"].start, + rect["azimuth"].start, + ] + ) + azimuth_positions_corners = slc["azimuth"].values[azimuth_positions_corners_ind] + plt.plot( + range_positions_corners, azimuth_positions_corners, ".-", label=lab, lw=5 + ) mean_x = X.mean() mean_y = Y.mean() - plt.annotate('#%i' % lab,(mean_x,mean_y),weight='bold',fontsize=20) + plt.annotate("#%i" % lab, (mean_x, mean_y), weight="bold", fontsize=20) plt.legend(ncol=2) - plt.xlabel('range',fontsize=20) - plt.ylabel('azimuth',fontsize=20) + plt.xlabel("range", fontsize=20) + plt.ylabel("azimuth", fontsize=20) plt.grid(True) plt.show() -def display_xspectra_per_domain(allspecs_per_sub_domain,part='Re'): +def display_xspectra_per_domain(allspecs_per_sub_domain, part="Re"): """ :param allspecs_per_sub_domain: :param (str): Re or Im, component of complex x spectra to display :return: """ - cmap = 
mcolors.LinearSegmentedColormap.from_list("",["white","violet","mediumpurple","cyan","springgreen","yellow", - "red"]) + cmap = mcolors.LinearSegmentedColormap.from_list( + "", ["white", "violet", "mediumpurple", "cyan", "springgreen", "yellow", "red"] + ) ncols = int(np.sqrt(len(allspecs_per_sub_domain))) nrows = 0 - while nrows*ncols',xindplot,yindplot) # plt.subplot(4,4,subdom+1) - plt.title('sub domain #%s' % subdom) - if part=='Re': - crossSpectraRe = np.abs(allspecs_per_sub_domain[subdom]['cross-spectrum_2tau'].mean(dim='2tau').real) + plt.title("sub domain #%s" % subdom) + if part == "Re": + crossSpectraRe = np.abs( + allspecs_per_sub_domain[subdom]["cross-spectrum_2tau"] + .mean(dim="2tau") + .real + ) crossSpectraRe_red = crossSpectraRe.where( - np.logical_and(np.abs(crossSpectraRe.kx) <= 0.14,np.abs(crossSpectraRe.ky) <= 0.14),drop=True) - crossSpectraRe_red = crossSpectraRe_red.rolling(kx=3,center=True).mean().rolling(ky=3,center=True).mean() - if vmin is None : + np.logical_and( + np.abs(crossSpectraRe.kx) <= 0.14, np.abs(crossSpectraRe.ky) <= 0.14 + ), + drop=True, + ) + crossSpectraRe_red = ( + crossSpectraRe_red.rolling(kx=3, center=True) + .mean() + .rolling(ky=3, center=True) + .mean() + ) + if vmin is None: vmin = 0 vmax = crossSpectraRe_red.max() - levels = np.linspace(vmin,vmax,25) - crossSpectraRe_red.plot(x='kx',cmap=cmap,levels=levels,vmin=0) + levels = np.linspace(vmin, vmax, 25) + crossSpectraRe_red.plot(x="kx", cmap=cmap, levels=levels, vmin=0) else: - crossSpectraIm = allspecs_per_sub_domain[subdom]['cross-spectrum_2tau'].mean(dim='2tau').imag + crossSpectraIm = ( + allspecs_per_sub_domain[subdom]["cross-spectrum_2tau"] + .mean(dim="2tau") + .imag + ) crossSpectraIm_red = crossSpectraIm.where( - np.logical_and(np.abs(crossSpectraIm.kx) <= 0.14,np.abs(crossSpectraIm.ky) <= 0.14),drop=True) - crossSpectraIm_red = crossSpectraIm_red.rolling(kx=3,center=True).mean().rolling(ky=3,center=True).mean() - if vmin is None : + np.logical_and( + 
np.abs(crossSpectraIm.kx) <= 0.14, np.abs(crossSpectraIm.ky) <= 0.14 + ), + drop=True, + ) + crossSpectraIm_red = ( + crossSpectraIm_red.rolling(kx=3, center=True) + .mean() + .rolling(ky=3, center=True) + .mean() + ) + if vmin is None: vmin = -crossSpectraIm_red.max() vmin = crossSpectraIm_red.min() vmax = crossSpectraIm_red.max() - levels = np.linspace(vmin,vmax,25) - crossSpectraIm_red.plot(x='kx',cmap=PuOr,levels=levels,vmin=vmin) + levels = np.linspace(vmin, vmax, 25) + crossSpectraIm_red.plot(x="kx", cmap=PuOr, levels=levels, vmin=vmin) plt.grid() - plt.axis('scaled') - - + plt.axis("scaled") - -def display_xspectra_per_domain_polar(allspecs_per_sub_domain,heading,part='Re',min_wavelength=100): +def display_xspectra_per_domain_polar( + allspecs_per_sub_domain, heading, part="Re", min_wavelength=100 +): """ :param allspecs_per_sub_domain: :return: """ - cmap = mcolors.LinearSegmentedColormap.from_list("",["white","violet","mediumpurple","cyan","springgreen","yellow", - "red"]) + cmap = mcolors.LinearSegmentedColormap.from_list( + "", ["white", "violet", "mediumpurple", "cyan", "springgreen", "yellow", "red"] + ) ncols = int(np.sqrt(len(allspecs_per_sub_domain))) nrows = 0 - while nrows*ncols Date: Tue, 7 Jan 2025 12:43:56 +0100 Subject: [PATCH 05/10] add CI for unit test and fix method name in doc sphinx --- .github/workflows/ci.yaml | 84 +++++++++++++++++++ docs/examples/display_a_IW_L1B_xspectra.ipynb | 2 +- pyproject.toml | 41 ++++++++- 3 files changed, 122 insertions(+), 5 deletions(-) create mode 100644 .github/workflows/ci.yaml diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 0000000..ee6a869 --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,84 @@ +name: CI + +on: + push: + branches: [main] + pull_request: + branches: [main] + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + detect-skip-ci-trigger: + name: "Detect CI Trigger: 
[skip-ci]" + if: | + github.repository == 'umr-lops/utils_xsarslc_l1b' + && ( + github.event_name == 'push' || github.event_name == 'pull_request' + ) + runs-on: ubuntu-latest + outputs: + triggered: ${{ steps.detect-trigger.outputs.trigger-found }} + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 2 + - uses: xarray-contrib/ci-trigger@v1 + id: detect-trigger + with: + keyword: "[skip-ci]" + + ci: + name: ${{ matrix.os }} py${{ matrix.python-version }} + runs-on: ${{ matrix.os }} + needs: detect-skip-ci-trigger + + if: needs.detect-skip-ci-trigger.outputs.triggered == 'false' + + defaults: + run: + shell: bash -l {0} + + strategy: + fail-fast: false + matrix: + python-version: ["3.10", "3.11", "3.12"] + os: ["ubuntu-latest", "macos-latest", "windows-latest"] + + steps: + - name: Checkout the repository + uses: actions/checkout@v4 + with: + # need to fetch all tags to get a correct version + fetch-depth: 0 # fetch all branches and tags + + - name: Setup environment variables + run: | + echo "TODAY=$(date +'%Y-%m-%d')" >> $GITHUB_ENV + + echo "CONDA_ENV_FILE=ci/requirements/environment.yaml" >> $GITHUB_ENV + + - name: Setup micromamba + uses: mamba-org/setup-micromamba@v2 + with: + environment-file: ${{ env.CONDA_ENV_FILE }} + environment-name: slcl1butils-tests + cache-environment: true + cache-environment-key: "${{runner.os}}-${{runner.arch}}-py${{matrix.python-version}}-${{env.TODAY}}-${{hashFiles(env.CONDA_ENV_FILE)}}" + create-args: >- + python=${{matrix.python-version}} + + - name: Install slcl1butils + run: | + python -m pip install --no-deps -e . 
+ + - name: Import slcl1butils + run: | + python -c "import slcl1butils" + + - name: Run tests + run: | + python -m pytest --cov=slcl1butils diff --git a/docs/examples/display_a_IW_L1B_xspectra.ipynb b/docs/examples/display_a_IW_L1B_xspectra.ipynb index d02201c..d60f5f7 100644 --- a/docs/examples/display_a_IW_L1B_xspectra.ipynb +++ b/docs/examples/display_a_IW_L1B_xspectra.ipynb @@ -187,7 +187,7 @@ "tile_line_i = 0\n", "tile_sample_i = 3\n", "fig = plt.figure()\n", - "slcl1butils.plotting.display_one_spectra.plot_a_single_xspec_cart_L1B_IW(ds,'VV',tile_line_i,tile_sample_i,'test display',fig,cat_xspec=cat_xspec,part='Re')" + "slcl1butils.plotting.display_one_spectra.plot_a_single_xspec_cart_l1b_iw(ds,'VV',tile_line_i,tile_sample_i,'test display',fig,cat_xspec=cat_xspec,part='Re')" ] } ], diff --git a/pyproject.toml b/pyproject.toml index 0b27ea0..e70ab32 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,6 +40,11 @@ packages = ["slcl1butils"] [tool.setuptools_scm] fallback_version = "999" +[project.scripts] +do_IW_L1C_SAFE_from_L1B_SAFE = "slcl1butils.scripts.do_IW_L1C_SAFE_from_L1B_SAFE:main" +do_WV_L1C_SAFE_from_L1B_SAFE = "slcl1butils.scripts.do_WV_L1C_SAFE_from_L1B_SAFE:main" +stack_WV_L1C_monthly = 'slcl1butils.compute.stack_wv_l1c_monthly:main' + [tool.isort] profile = "black" skip_gitignore = true @@ -47,7 +52,35 @@ float_to_top = true default_section = "THIRDPARTY" known_first_party = "slcl1butils" -[project.scripts] -do_IW_L1C_SAFE_from_L1B_SAFE = "slcl1butils.scripts.do_IW_L1C_SAFE_from_L1B_SAFE:main" -do_WV_L1C_SAFE_from_L1B_SAFE = "slcl1butils.scripts.do_WV_L1C_SAFE_from_L1B_SAFE:main" -stack_WV_L1C_monthly = 'slcl1butils.compute.stack_wv_l1c_monthly:main' \ No newline at end of file +[tool.coverage.report] +show_missing = true +exclude_lines = ["pragma: no cover", "if TYPE_CHECKING"] + +[tool.ruff.lint] +ignore = [ + "E402", # module level import not at top of file + "E501", # line too long - let black worry about that + "E731", # do not assign a 
lambda expression, use a def + "UP038", # type union instead of tuple for isinstance etc +] +select = [ + "F", # Pyflakes + "E", # Pycodestyle + "I", # isort + "UP", # Pyupgrade + "TID", # flake8-tidy-imports + "W", +] +extend-safe-fixes = [ + "TID252", # absolute imports + "UP031", # percent string interpolation +] +fixable = ["I", "TID252", "UP"] + +[tool.ruff.lint.isort] +known-first-party = ["safe_s1"] +known-third-party = ["xarray", "toolz", "construct"] + +[tool.ruff.lint.flake8-tidy-imports] +# Disallow all relative imports. +ban-relative-imports = "all" From 5b9046cd39302734955dd38870096d7b65f93c49 Mon Sep 17 00:00:00 2001 From: grouazel Date: Tue, 7 Jan 2025 12:47:40 +0100 Subject: [PATCH 06/10] add pytestcov --- ci/requirements/environment.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/ci/requirements/environment.yaml b/ci/requirements/environment.yaml index e753605..269e160 100644 --- a/ci/requirements/environment.yaml +++ b/ci/requirements/environment.yaml @@ -14,6 +14,7 @@ dependencies: # testing - pytest - pytest-reportlog + - pytest-cov - hypothesis - coverage # I/O From d3921a17bedf9f41e37252f6ddca5b52b22ec706 Mon Sep 17 00:00:00 2001 From: grouazel Date: Tue, 7 Jan 2025 14:15:02 +0100 Subject: [PATCH 07/10] fix NaN->nan --- slcl1butils/tests/test_resampling.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/slcl1butils/tests/test_resampling.py b/slcl1butils/tests/test_resampling.py index 29364c3..3aec995 100644 --- a/slcl1butils/tests/test_resampling.py +++ b/slcl1butils/tests/test_resampling.py @@ -19,7 +19,7 @@ N,M = 100,100 field_ww3 = np.random.rand(N,M) field_ww3_with_nan = copy.copy(field_ww3) -field_ww3_with_nan[1,2] = np.NaN +field_ww3_with_nan[1,2] = np.nan xa_ww3 = xr.DataArray(field_ww3, dims=('x','y'), coords={'y':np.linspace(20,50,N), 'x':np.linspace(100,150,M)}) From 99ba60edafb0325978382d8624934e7912e23cd5 Mon Sep 17 00:00:00 2001 From: grouazel Date: Tue, 7 Jan 2025 16:24:59 +0100 Subject: [PATCH 08/10] fix 
python version

---
 ci/requirements/environment.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ci/requirements/environment.yaml b/ci/requirements/environment.yaml
index 269e160..6bc76c2 100644
--- a/ci/requirements/environment.yaml
+++ b/ci/requirements/environment.yaml
@@ -2,7 +2,7 @@ name: slcl1butils-tests
 channels:
   - conda-forge
 dependencies:
-  - python=3.10
+  - python
   # development
   - ipython
   - pre-commit

From 5f5d14df3b4152a345ee3258e942c42185c68ff9 Mon Sep 17 00:00:00 2001
From: grouazel
Date: Tue, 7 Jan 2025 17:03:27 +0100
Subject: [PATCH 09/10] handle backslash path in windows

---
 slcl1butils/utils.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/slcl1butils/utils.py b/slcl1butils/utils.py
index a9fb5d1..3b5e0a7 100644
--- a/slcl1butils/utils.py
+++ b/slcl1butils/utils.py
@@ -224,4 +224,5 @@ def get_l1c_filepath(l1b_fullpath, version, outputdir=None, makedir=True):
     logging.debug("File out: %s ", l1c_full_path)
     if not os.path.exists(os.path.dirname(l1c_full_path)) and makedir:
         os.makedirs(os.path.dirname(l1c_full_path), 0o0775)
+    l1c_full_path = os.path.normpath(l1c_full_path)
     return l1c_full_path

From a3b7dc93b4d5d10aa8362172bae5811f7d363888 Mon Sep 17 00:00:00 2001
From: grouazel
Date: Tue, 7 Jan 2025 17:16:31 +0100
Subject: [PATCH 10/10] handle backslash path in windows

---
 slcl1butils/utils.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/slcl1butils/utils.py b/slcl1butils/utils.py
index 3b5e0a7..ae73986 100644
--- a/slcl1butils/utils.py
+++ b/slcl1butils/utils.py
@@ -224,5 +224,5 @@ def get_l1c_filepath(l1b_fullpath, version, outputdir=None, makedir=True):
     logging.debug("File out: %s ", l1c_full_path)
     if not os.path.exists(os.path.dirname(l1c_full_path)) and makedir:
         os.makedirs(os.path.dirname(l1c_full_path), 0o0775)
-    l1c_full_path = os.path.normpath(l1c_full_path)
+    l1c_full_path = os.path.normpath(l1c_full_path).replace("\\","/") # use forward slashes even on Windows
     return l1c_full_path