Added unit tests and examples (#9)
* added a dataset() factory function to __init__.py

* using open_dataset instead of load_dataset makes things much faster

* added pytest tests for each class

* added an example notebook

* skipping the GFS forecast tests for now

* corrected the case-sensitive "z" in the cycle time format

---------

Co-authored-by: Maarten van Ormondt <maarten.vanormondt@deltares.nl>
panosatha and maartenvanormondt authored Feb 25, 2025
1 parent 44cccd7 commit 05de90f
Showing 12 changed files with 632 additions and 87 deletions.
24 changes: 24 additions & 0 deletions cht_meteo/cht/meteo/__init__.py
@@ -6,7 +6,31 @@
@author: ormondt
"""

+from cht_meteo.cht.meteo.coamps_tc_forecast_s3 import MeteoDatasetCOAMPSTCForecastS3
from cht_meteo.cht.meteo.database import MeteoDatabase
+from cht_meteo.cht.meteo.dataset import MeteoDataset
+from cht_meteo.cht.meteo.gfs_anl_0p50 import MeteoDatasetGFSAnalysis0p50
+from cht_meteo.cht.meteo.gfs_forecast_0p25 import MeteoDatasetGFSForecast0p25

__all__ = ["MeteoDatabase"]


+def dataset(name=None, source=None, path=None, **kwargs):
+    if source is not None:
+        if source == "coamps_tc_forecast_s3":
+            md = MeteoDatasetCOAMPSTCForecastS3(name=name, path=path, **kwargs)
+        elif source == "gfs_forecast_0p25":
+            md = MeteoDatasetGFSForecast0p25(name=name, path=path, **kwargs)
+        elif source == "gfs_analysis_0p50":
+            md = MeteoDatasetGFSAnalysis0p50(name=name, path=path, **kwargs)
+        else:
+            md = MeteoDataset(name=name)
+            print(
+                f"Error while reading meteo database : source {source} not recognized"
+            )
+
+    else:
+        # Use generic meteo dataset (this does not have download functionality)
+        md = MeteoDataset(name=name)
+
+    return md
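
For orientation, a minimal usage sketch of the factory added above; the name, source string, and path below are illustrative assumptions, not values taken from this commit:

# Hypothetical call to the new dataset() factory; "gfs_analysis_0p50" routes to
# MeteoDatasetGFSAnalysis0p50, and path is simply where downloaded files would go.
from cht_meteo.cht.meteo import dataset

gfs = dataset(name="gfs_anl", source="gfs_analysis_0p50", path="./meteo/gfs_anl")
print(type(gfs).__name__)  # expected: MeteoDatasetGFSAnalysis0p50
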
30 changes: 24 additions & 6 deletions cht_meteo/cht/meteo/coamps_tc_forecast_s3.py
@@ -7,8 +7,7 @@
import xarray as xr
from cht_utils import fileops as fo

-from cht_meteo.cht.meteo.dataset import MeteoDataset
-# from cht_meteo.cht.meteo.metget_utils import get_storm_track
+from .dataset import MeteoDataset


class MeteoSubset:
@@ -66,7 +65,7 @@ def download_forecast_cycle(self, **kwargs):
        # Get year from cycle_time
        year = cycle_time.year
        cycle_time_coamps = cycle_time.strftime("%Y%m%d%H")
-        cycle_time_meteo = cycle_time.strftime("%Y%m%d_%HZ")
+        cycle_time_meteo = cycle_time.strftime("%Y%m%d_%Hz")

        # Make folder for the forecast
        forecast_path = os.path.join(
@@ -140,9 +139,6 @@ def download_forecast_cycle(self, **kwargs):
        # Remove the temporary folder
        fo.delete_folder(tar_file_path)

-        # Also try to download the track file
-        track_file_name = storm_number + "_" + cycle_time_coamps + "_track.txt"  # noqa: F841


def convert_coamps_nc_to_meteo_nc(inpfile, outfile):
    # Open the COAMPS-TC netcdf file
@@ -172,3 +168,25 @@ def convert_coamps_nc_to_meteo_nc(inpfile, outfile):

    # Write output file
    ds.to_netcdf(outfile)


+def get_storm_track(track_path: str, year: int, storm: str, cycle: str):
+    """
+    Download the storm track file for a given year, storm, and cycle.
+
+    Parameters:
+        track_path (str): Folder in which the downloaded track file is saved.
+        year (int): The year of the storm track data.
+        storm (str): The storm identifier.
+        cycle (str): The cycle of the storm track data.
+    """
+    try:
+        filename = os.path.join(track_path, f"TRK_COAMPS_CTCX_3_{cycle}_{storm}.trk")
+        url = f"https://coamps-tc-data.s3.us-east-2.amazonaws.com/deterministic/realtime/{year}/{storm}/{cycle}/TRK_COAMPS_CTCX_3_{cycle}_{storm}"
+        urllib.request.urlretrieve(url, filename)
+    except Exception as e:
+        print(f"Error downloading {url}")
+        print(e)
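
A usage sketch for the helper above; the storm identifier and cycle string are made-up placeholders following the pattern in the S3 URL:

# Hypothetical call; "14L" and "2022092800" are placeholder storm/cycle values.
get_storm_track(track_path="./tracks", year=2022, storm="14L", cycle="2022092800")
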
12 changes: 6 additions & 6 deletions cht_meteo/cht/meteo/dataset.py
@@ -1,5 +1,6 @@
import datetime
import os
+from typing import Optional

import cht_utils.fileops as fo
import numpy as np
@@ -580,7 +581,6 @@ def interpolate_dataset(self, dataset, copy_time=False):

    def merge_datasets(self, datasets, **kwargs):
        """Merge datasets. This is useful when we have multiple datasets with different resolutions."""
-        # For now we just return the first dataset
        for dataset in datasets:
            self.interpolate_dataset(dataset)

@@ -694,11 +694,11 @@ def interpolate_variable(

        return v

-    def to_netcdf(self, **kwargs):
+    def to_netcdf(self, file_name: Optional[os.PathLike] = None, **kwargs):
        """Write to netcdf files. This is not implemented yet."""
-        if "filename" in kwargs:
+        if file_name:
            # Write to single file
-            self.ds.to_netcdf(path=kwargs["filename"])
+            self.ds.to_netcdf(path=file_name)
        else:
            # Write to database
            os.makedirs(self.path, exist_ok=True)
@@ -716,7 +716,7 @@ def to_netcdf(self, **kwargs):

    def to_delft3d(
        self,
-        file_name,
+        file_name: Optional[os.PathLike] = None,
        version="1.03",
        path=None,
        header_comments=False,
@@ -735,7 +735,7 @@ def to_delft3d(
        if format == "ascii":
            write_to_delft3d_ascii(
                self,
-                file_name,
+                str(file_name),
                version,
                path,
                header_comments,
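
A short sketch of how the reworked to_netcdf signature might be called; md is assumed to be an already-populated MeteoDataset, and the file name is illustrative:

# Hypothetical usage of the new file_name keyword argument.
md.to_netcdf(file_name="meteo_subset.nc")  # write everything to a single file
md.to_netcdf()                             # otherwise write into the dataset's own path
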
10 changes: 6 additions & 4 deletions cht_meteo/cht/meteo/gfs_anl_0p50.py
@@ -17,7 +17,7 @@
# from xarray.backends import NetCDF4DataStore
import xarray as xr

-from cht_meteo.cht.meteo.dataset import MeteoDataset
+from .dataset import MeteoDataset


class MeteoDatasetGFSAnalysis0p50(MeteoDataset):
@@ -45,8 +45,7 @@ def download_analysis_times(self, requested_times, **kwargs):
        londeg = "east"
        if lon_range[0] < 0:
            londeg = "west"
-            lon_range[0] = 360.0 + lon_range[0]
-            lon_range[1] = 360.0 + lon_range[1]
+            lon_range = (360.0 + lon_range[0], 360.0 + lon_range[1])
        # lon_range[0] = np.mod(lon_range[0], 360.0)
        # lon_range[1] = np.mod(lon_range[1], 360.0)

@@ -220,7 +219,8 @@ def download_analysis_times(self, requested_times, **kwargs):
            ds0 = None
            for iattempt in range(10):
                try:
-                    ds0 = xr.load_dataset(url + name)
+                    # ds0 = xr.load_dataset(url + name)
+                    ds0 = xr.open_dataset(url + name)
                    if iattempt > 0:
                        print("Success at attempt no " + str(iattempt + 1))
                    break
@@ -267,6 +267,8 @@

                ds[param] = xr.DataArray(val, dims=("lat", "lon"))

+            ds0.close()

            else:
                print("Could not get data ...")

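
For background on the load_dataset to open_dataset switch noted in the commit message, a minimal sketch (the URL and variable name are placeholders): open_dataset reads only metadata up front and loads values lazily when sliced, which is why the explicit ds0.close() above is needed, whereas load_dataset pulls the whole dataset into memory immediately.

# Illustrative comparison; the URL and variable name are placeholders.
import xarray as xr

ds = xr.open_dataset("https://example.org/gfs_analysis.nc")  # lazy open, metadata only
subset = ds["some_variable"].isel(time=0).load()             # load only the slice needed
ds.close()                                                    # release the file/connection

# xr.load_dataset("https://example.org/gfs_analysis.nc") would read everything eagerly.
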
2 changes: 1 addition & 1 deletion cht_meteo/cht/meteo/gfs_forecast_0p25.py
@@ -14,7 +14,7 @@
from siphon.catalog import TDSCatalog
from xarray.backends import NetCDF4DataStore

-from cht_meteo.cht.meteo.dataset import MeteoDataset
+from .dataset import MeteoDataset


class MeteoDatasetGFSForecast0p25(MeteoDataset):