Merge pull request #82 from vincelhx/main
only GmfLut & NcLutModel
renamed flattened nesz var
vincelhx authored Oct 10, 2024
2 parents 8b67c0b + 56489e2 commit 3fccd4c
Showing 3 changed files with 74 additions and 112 deletions.
105 changes: 52 additions & 53 deletions .github/workflows/ci.yml
@@ -12,11 +12,11 @@ jobs:
test:
runs-on: ubuntu-latest
timeout-minutes: 360 # 6 hours limit for the job

steps:
# Checkout the code
- name: Checkout code
uses: actions/checkout@v4

- uses: mamba-org/setup-micromamba@v1
with:
@@ -42,57 +42,56 @@ jobs:
micromamba activate grdwind_env
conda install --use-local grdwindinversion -y -c tcevaer -c conda-forge
# Cache the test data if previously downloaded (up to 10 GB limit for the cache)
# WARNING : modify the key if the data is modified !!
- name: Cache test data
uses: actions/cache@v4
id: cache
with:
path: ./test_data
key: test-data-v3
restore-keys: test-data-v3

# Download test data if not already cached
- name: Download test data
if: steps.cache.outputs.cache-hit != 'true' # Only download if cache miss
run: |
mkdir -p ./test_data/
wget https://cloud.ifremer.fr/index.php/s/ExLQ2TnYAqozPWE/download -O /tmp/ecmwf.zip
unzip /tmp/ecmwf.zip -d ./test_data/
wget https://cloud.ifremer.fr/index.php/s/kRgdOOPsjoZieZR/download -O /tmp/l1.zip
unzip /tmp/l1.zip -d ./test_data/
timeout-minutes: 200 # Adjust depending on the size of your data

# Set up xsar configuration
- name: Setup xsar configuration
run: |
mkdir -p ~/.xsar
echo "data_dir: /tmp" > ~/.xsar/config.yaml
echo "auxiliary_dir: ./test_data/auxiliary" >> ~/.xsar/config.yaml
echo "path_dataframe_aux: ./test_data/auxiliary/active_aux.csv" >> ~/.xsar/config.yaml
# Set up grdwindinversion configuration
- name: Setup grdwindinversion configuration
run: |
mkdir -p ~/.grdwindinversion
echo "'ecmwf_0100_1h': ./test_data/ECMWF/forecast/hourly/0100deg/netcdf_light/%Y/%j/ECMWF_FORECAST_0100_%Y%m%d%H%M_10U_10V.nc" > ~/.grdwindinversion/data_config.yaml
echo "'ecmwf_0125_1h': ./test_data/ECMWF/0.125deg/1h/forecasts/%Y/%j/ecmwf_%Y%m%d%H%M.nc" >> ~/.grdwindinversion/data_config.yaml
#echo "'sarwing_luts_path': './test_data/GMFS/v1.6/'" >> ~/.grdwindinversion/data_config.yaml   (commented line removed by this commit)
#echo "'nc_luts_path': ./test_data/GMFS/nc_luts" >> ~/.grdwindinversion/data_config.yaml
#echo "'lut_cmod7_path': './test_data/GMFS/v1.6/GMF_cmod7_official/cmod7_and_python_script'" >> ~/.grdwindinversion/data_config.yaml
#echo "'lut_ms1ahw_path': './test_data/GMFS/v1.6/GMF_cmodms1ahw'" >> ~/.grdwindinversion/data_config.yaml
# Run the tests
- name: Run tests
run: |
eval "$(micromamba shell hook --shell bash)"
micromamba activate grdwind_env
pytest
# Optionally, upload test artifacts (NetCDF files or logs) if needed
#- name: Upload test artifacts
# if: failure() # Only upload on failure
# uses: actions/upload-artifact@v2
# with:
# name: test-output
# path: ./test_output/
28 changes: 0 additions & 28 deletions grdwindinversion/.travis.yml

This file was deleted.

53 changes: 22 additions & 31 deletions grdwindinversion/inversion.py
@@ -98,10 +98,10 @@ def getOutputName2(input_file, outdir, sensor, meta, subdir=True):
regex = re.compile(
"([A-Z0-9]+)_OK([0-9]+)_PK([0-9]+)_(.*?)_(.*?)_(.*?)_(.*?)_(.*?)_(.*?)_(.*?)")
template = string.Template(
"${MISSIONID}_OK${DATA1}_PK${DATA2}_${DATA3}_${BEAM_MODE}_${DATE}_${TIME}_${POLARIZATION1}_${POLARIZATION2}_${PRODUCT}")
"${MISSIONID}_OK${DATA1}_PK${DATA2}_${DATA3}_${BEAM}_${DATE}_${TIME}_${POLARIZATION1}_${POLARIZATION2}_${PRODUCT}")
match = regex.match(basename_match)
MISSIONID, DATA1, DATA2, DATA3, BEAM_MODE, DATE, TIME, POLARIZATION1, POLARIZATION2, LAST = match.groups()
new_format = f"{MISSIONID.lower()}-{BEAM_MODE.lower()}-owi-xx-{meta_start_date.lower()}-{meta_stop_date.lower()}-_____-_____.nc"
new_format = f"{MISSIONID.lower()}-{BEAM.lower()}-owi-xx-{meta_start_date.lower()}-{meta_stop_date.lower()}-_____-_____.nc"
else:
raise ValueError(
"sensor must be S1A|S1B|RS2|RCM, got sensor %s" % sensor)
@@ -243,7 +243,7 @@ def inverse(dual_pol, inc, sigma0, sigma0_dual, ancillary_wind, dsig_cr, model_c
See Also
--------
xsarsea documentation
https://cyclobs.ifremer.fr/static/sarwing_datarmor/xsarsea/examples/windspeed_inversion.html
https://cerweb.ifremer.fr/datarmor/doc_sphinx/xsarsea/
"""
logging.debug("inversion")

@@ -282,7 +282,7 @@ def inverse(dual_pol, inc, sigma0, sigma0_dual, ancillary_wind, dsig_cr, model_c
return wind_co, None, None


def makeL2asOwi(xr_dataset, dual_pol, copol, crosspol, add_streaks):
def makeL2asOwi(xr_dataset, dual_pol, copol, crosspol, add_streaks, apply_flattening):
"""
Rename xr_dataset variables and attributes to match naming convention.
@@ -392,13 +392,18 @@ def makeL2asOwi(xr_dataset, dual_pol, copol, crosspol, add_streaks):

xr_dataset = xr_dataset.rename({
'dsig_cross': 'owiDsig_cross',
'nesz_cross_final': 'owiNesz_cross_final',
'winddir_cross': 'owiWindDirection_cross',
'winddir_dual': 'owiWindDirection',
'windspeed_cross': 'owiWindSpeed_cross',
'windspeed_dual': 'owiWindSpeed',
'sigma0_detrend_cross': 'owiNrcs_detrend_cross'
})

if apply_flattening:
xr_dataset = xr_dataset.rename({
'nesz_cross_flattened': 'owiNesz_cross_flattened',
})

# nrcs cross
xr_dataset['owiNrcs_cross'] = xr_dataset['sigma0_ocean'].sel(
pol=crosspol)
@@ -635,15 +640,7 @@ def preprocess(filename, outdir, config_path, overwrite=False, add_streaks=False
config["l2_params"]["model_cross"] = model_cross
config["sensor_longname"] = sensor_longname

# need to load gmfs before inversion
gmfs_impl = [x for x in [model_co, model_cross] if "gmf_" in x]
windspeed.gmfs.GmfModel.activate_gmfs_impl(gmfs_impl)
sarwings_luts = [x for x in [model_co, model_cross]
if x.startswith("sarwing_lut_")]

if len(sarwings_luts) > 0:
windspeed.register_sarwing_luts(getConf()["sarwing_luts_path"])

# need to load LUTs before inversion
nc_luts = [x for x in [model_co, model_cross] if x.startswith("nc_lut")]

if len(nc_luts) > 0:
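Pieced together from this hunk and the commit title ("only GmfLut & NcLutModel"), the model-loading step now reduces to roughly the sketch below. The register_nc_luts call is an assumption: the body of the if falls below the fold, and the call simply mirrors the removed register_sarwing_luts line.

    # analytical GMFs ("gmf_*") are activated, NC LUTs ("nc_lut*") are registered,
    # and the sarwing LUT branch is gone
    models = [model_co, model_cross]
    windspeed.gmfs.GmfModel.activate_gmfs_impl([m for m in models if "gmf_" in m])
    nc_luts = [m for m in models if m.startswith("nc_lut")]
    if len(nc_luts) > 0:
        # assumed call, not visible in this hunk; mirrors the removed sarwing registration
        windspeed.register_nc_luts(getConf()["nc_luts_path"])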
@@ -767,22 +764,17 @@ def preprocess(filename, outdir, config_path, overwrite=False, add_streaks=False
xr_dataset['sigma0_detrend_cross'] = xsarsea.sigma0_detrend(
xr_dataset.sigma0.sel(pol=crosspol), xr_dataset.incidence, model=model_cross)
if config["apply_flattening"]:
xr_dataset = xr_dataset.assign(nesz_cross_final=(
xr_dataset = xr_dataset.assign(nesz_cross_flattened=(
['line', 'sample'], windspeed.nesz_flattening(xr_dataset.nesz.sel(pol=crosspol), xr_dataset.incidence)))
xr_dataset['nesz_cross_final'].attrs[
xr_dataset['nesz_cross_flattened'].attrs[
"comment"] = 'nesz has been flattened using windspeed.nesz_flattening'

# dsig
xr_dataset["dsig_cross"] = windspeed.get_dsig(config["dsig_"+crosspol_gmf+"_NAME"], xr_dataset.incidence,
xr_dataset['sigma0_ocean'].sel(pol=crosspol), xr_dataset.nesz_cross_flattened)
else:
xr_dataset = xr_dataset.assign(
nesz_cross_final=(['line', 'sample'], xr_dataset.nesz.sel(pol=crosspol).values))
xr_dataset['nesz_cross_final'].attrs["comment"] = 'nesz has not been flattened'

xr_dataset.nesz_cross_final.attrs['units'] = 'm^2 / m^2'
xr_dataset.nesz_cross_final.attrs['long_name'] = 'Noise Equivalent SigmaNaught'

# dsig
xr_dataset["dsig_cross"] = windspeed.get_dsig(config["dsig_"+crosspol_gmf+"_NAME"], xr_dataset.incidence,
xr_dataset['sigma0_ocean'].sel(pol=crosspol), xr_dataset.nesz_cross_final)
# dsig
xr_dataset["dsig_cross"] = windspeed.get_dsig(config["dsig_"+crosspol_gmf+"_NAME"], xr_dataset.incidence,
xr_dataset['sigma0_ocean'].sel(pol=crosspol), xr_dataset['sigma0_ocean'].sel(pol=crosspol))

xr_dataset.dsig_cross.attrs['comment'] = 'variable used to ponderate copol and crosspol'
xr_dataset.dsig_cross.attrs['formula_used'] = config["dsig_" +
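Because the removed and added lines are interleaved above, here is how the cross-pol NESZ / dsig branch appears to read after this commit, pieced together from the added lines only (indentation and surrounding code are inferred, so treat this as a sketch rather than a verbatim copy of inversion.py):

    if config["apply_flattening"]:
        xr_dataset = xr_dataset.assign(nesz_cross_flattened=(
            ['line', 'sample'],
            windspeed.nesz_flattening(xr_dataset.nesz.sel(pol=crosspol), xr_dataset.incidence)))
        xr_dataset['nesz_cross_flattened'].attrs["comment"] = \
            'nesz has been flattened using windspeed.nesz_flattening'
        # dsig is computed against the flattened noise floor
        xr_dataset["dsig_cross"] = windspeed.get_dsig(
            config["dsig_" + crosspol_gmf + "_NAME"], xr_dataset.incidence,
            xr_dataset['sigma0_ocean'].sel(pol=crosspol), xr_dataset.nesz_cross_flattened)
    else:
        # no non-flattened nesz_cross variable is created anymore;
        # dsig is fed sigma0_ocean itself, as shown in the added lines above
        xr_dataset["dsig_cross"] = windspeed.get_dsig(
            config["dsig_" + crosspol_gmf + "_NAME"], xr_dataset.incidence,
            xr_dataset['sigma0_ocean'].sel(pol=crosspol),
            xr_dataset['sigma0_ocean'].sel(pol=crosspol))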
@@ -936,8 +928,7 @@ def makeL2(filename, outdir, config_path, overwrite=False, generateCSV=True, add

xr_dataset['winddir_dual'] = transform_winddir(
wind_dual, xr_dataset.ground_heading, winddir_convention=config["winddir_convention"])
xr_dataset['winddir_dual'].attrs["model"] = "%s (%s) & %s (%s)" % (
model_co, copol, model_cross, crosspol)
xr_dataset["winddir_dual"].attrs["model"] = "winddir_dual is a copy of copol wind direction"

xr_dataset = xr_dataset.assign(
windspeed_cross=(['line', 'sample'], windspeed_cr))
@@ -949,7 +940,7 @@ def makeL2(filename, outdir, config_path, overwrite=False, generateCSV=True, add

xr_dataset['winddir_cross'] = xr_dataset['winddir_dual'].copy()
xr_dataset['winddir_cross'].attrs = xr_dataset['winddir_dual'].attrs
xr_dataset["winddir_cross"].attrs["model"] = "No model used ; content is a copy of dualpol wind direction"
xr_dataset["winddir_cross"].attrs["model"] = "winddir_cross is a copy of copol wind direction"

if config["winddir_convention"] == "oceanographic":
attrs = xr_dataset['ancillary_wind_direction'].attrs
@@ -960,7 +951,7 @@ def makeL2(filename, outdir, config_path, overwrite=False, generateCSV=True, add
"long_name"] = f"{ancillary_name} wind direction in oceanographic convention (clockwise, to), ex: 0°=to north, 90°=to east"

xr_dataset, encoding = makeL2asOwi(
xr_dataset, dual_pol, copol, crosspol, add_streaks=add_streaks)
xr_dataset, dual_pol, copol, crosspol, add_streaks=add_streaks, apply_flattening=config["apply_flattening"])

#  add attributes
firstMeasurementTime = None
