Skip to content

Commit

Permalink
Update how alias is used with "file_key" in ds_info, which eliminates the need for a special lookup within the reader.
Browse files Browse the repository at this point in the history

Add tests to make sure aliases are created.
  • Loading branch information
joleenf committed Apr 11, 2023
1 parent 7184c8c commit c6d0122
Show file tree
Hide file tree
Showing 2 changed files with 40 additions and 40 deletions.
25 changes: 7 additions & 18 deletions satpy/readers/clavrx.py
Original file line number Diff line number Diff line change
Expand Up @@ -290,19 +290,6 @@ def get_metadata(sensor: str, platform: str, attrs: dict, ds_info: dict) -> dict

return attr_info

@staticmethod
def _lookup_alias(vname: str, sensor: str, is_polar: bool) -> str:
    """Return the channel key whose alias resolves to ``vname``, or ``vname`` itself.

    The aliases provide built-in access to the base sensor RGB composites:
    ``CHANNEL_ALIASES[sensor]`` maps channel keys to info dicts whose
    ``"name"`` entry is the underlying file variable name. This performs
    the reverse lookup (variable name -> channel key).

    Args:
        vname: Variable name that may be the target of an alias.
        sensor: Sensor key into ``CHANNEL_ALIASES``.
        is_polar: True for polar sensors, which have no aliases defined.

    Returns:
        The matching alias key, or ``vname`` unchanged when polar, when the
        sensor has no alias table, or when no alias maps to ``vname``.
    """
    if is_polar:
        # Aliases are not implemented for polar sensors; pass the name through
        # (the previous code fell through to an unbound-variable return here).
        return vname
    alias_table = CHANNEL_ALIASES.get(sensor, {})
    # Default of vname avoids StopIteration when no alias matches.
    return next(
        (key for key, value in alias_table.items() if value["name"] == vname),
        vname,
    )


class CLAVRXHDF4FileHandler(HDF4FileHandler, _CLAVRxHelper):
"""A file handler for CLAVRx files."""
Expand Down Expand Up @@ -464,13 +451,16 @@ def _available_new_datasets(self, handled_vars):
continue

ds_info = self._get_ds_info_for_data_arr(var_name)
ds_info.update({"file_key": var_name})
yield True, ds_info

alias_info = CHANNEL_ALIASES[self.sensor].get(var_name, None)
if alias_info is not None:
if self.nc.attrs["RESOLUTION_KM"] is not None:
alias_info["resolution"] = self.nc.attrs.get("RESOLUTION_KM", "2")
alias_info["resolution"] = alias_info["resolution"] * 1000.
alias_info.update({"file_key": var_name})
if "RESOLUTION_KM" in self.nc.attrs:
alias_info["resolution"] = self.nc.attrs["RESOLUTION_KM"] * 1000.
else:
alias_info["resolution"] = NADIR_RESOLUTION[self.sensor]
ds_info.update(alias_info)
yield True, ds_info

Expand Down Expand Up @@ -510,8 +500,7 @@ def get_area_def(self, key):

def get_dataset(self, dataset_id, ds_info):
"""Get a dataset for supported geostationary sensors."""
var_name = ds_info.get('name', dataset_id['name'])
var_name = _CLAVRxHelper._lookup_alias(var_name, self.sensor, self._is_polar())
var_name = ds_info.get("file_key", dataset_id['name'])
data = self[var_name]
data = _CLAVRxHelper._get_data(data, dataset_id)
data.attrs = _CLAVRxHelper.get_metadata(self.sensor, self.platform,
Expand Down
55 changes: 33 additions & 22 deletions satpy/tests/reader_tests/test_clavrx_nc.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@
DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0)
AHI_FILE = 'clavrx_H08_20210603_1500_B01_FLDK_R.level2.nc'
FILL_VALUE = -32768


def fake_test_content(filename, **kwargs):
Expand All @@ -51,7 +52,8 @@ def fake_test_content(filename, **kwargs):
longitude = xr.DataArray(DEFAULT_LON_DATA,
dims=('scan_lines_along_track_direction',
'pixel_elements_along_scan_direction'),
attrs={'_FillValue': np.nan,
attrs={'_FillValue': -999.,
'SCALED': 0,
'scale_factor': 1.,
'add_offset': 0.,
'standard_name': 'longitude',
Expand All @@ -61,37 +63,37 @@ def fake_test_content(filename, **kwargs):
latitude = xr.DataArray(DEFAULT_LAT_DATA,
dims=('scan_lines_along_track_direction',
'pixel_elements_along_scan_direction'),
attrs={'_FillValue': np.nan,
attrs={'_FillValue': -999.,
'SCALED': 0,
'scale_factor': 1.,
'add_offset': 0.,
'standard_name': 'latitude',
'units': 'degrees_south'
})

variable1 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.float32),
variable1 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.int8),
dims=('scan_lines_along_track_direction',
'pixel_elements_along_scan_direction'),
attrs={'_FillValue': np.nan,
'scale_factor': 1.,
'add_offset': 0.,
attrs={'_FillValue': -127,
'SCALED': 0,
'units': '1',
'valid_range': [-32767, 32767],
})

# data with fill values
variable2 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.float32),
# data with fill values and a file_type alias
variable2 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.int16),
dims=('scan_lines_along_track_direction',
'pixel_elements_along_scan_direction'),
attrs={'_FillValue': np.nan,
'scale_factor': 1.,
'add_offset': 0.,
'units': '1',
attrs={'_FillValue': FILL_VALUE,
'SCALED': 1,
'scale_factor': 0.001861629,
'add_offset': 59.,
'units': '%',
'valid_range': [-32767, 32767],
})
variable2 = variable2.where(variable2 % 2 != 0)
variable2 = variable2.where(variable2 % 2 != 0, FILL_VALUE)

# category
variable3 = xr.DataArray(DEFAULT_FILE_FLAGS,
variable3 = xr.DataArray(DEFAULT_FILE_FLAGS.astype(np.int8),
dims=('scan_lines_along_track_direction',
'pixel_elements_along_scan_direction'),
attrs={'SCALED': 0,
Expand All @@ -103,7 +105,7 @@ def fake_test_content(filename, **kwargs):
'longitude': longitude,
'latitude': latitude,
'variable1': variable1,
'variable2': variable2,
'refl_0_65um_nom': variable2,
'variable3': variable3
}

Expand Down Expand Up @@ -141,7 +143,7 @@ def test_reader_creation(self, filenames, expected_loadables):

@pytest.mark.parametrize(
("filenames", "expected_datasets"),
[([AHI_FILE], ['variable1', 'variable2', 'variable3']), ]
[([AHI_FILE], ['variable1', 'refl_0_65um_nom', 'variable3']), ]
)
def test_available_datasets(self, filenames, expected_datasets):
"""Test that variables are dynamically discovered."""
Expand All @@ -154,10 +156,13 @@ def test_available_datasets(self, filenames, expected_datasets):
avails = list(r.available_dataset_names)
for var_name in expected_datasets:
assert var_name in avails
# check extra datasets created by alias or coordinates
for var_name in ["latitude", "longitude", "C03"]:
assert var_name in avails

@pytest.mark.parametrize(
("filenames", "loadable_ids"),
[([AHI_FILE], ['variable1', 'variable2', 'variable3']), ]
[([AHI_FILE], ['variable1', 'refl_0_65um_nom', 'C03', 'variable3']), ]
)
def test_load_all_new_donor(self, filenames, loadable_ids):
"""Test loading all test datasets with new donor."""
Expand All @@ -184,18 +189,24 @@ def test_load_all_new_donor(self, filenames, loadable_ids):
)
fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key]
datasets = r.load(loadable_ids)
assert len(datasets) == 3
assert len(datasets) == 4
for v in datasets.values():
assert 'calibration' not in v.attrs
assert v.attrs['units'] == '1'
assert "units" in v.attrs
assert isinstance(v.attrs['area'], AreaDefinition)
assert v.attrs['platform_name'] == 'himawari8'
assert v.attrs['sensor'] == 'ahi'
assert 'rows_per_scan' not in v.coords.get('longitude').attrs
if v.attrs["name"] in ["variable1", "variable2"]:
if v.attrs["name"] == 'variable1':
assert "valid_range" not in v.attrs
assert v.dtype == np.float64
assert "_FillValue" not in v.attrs
# should have file variable and one alias for reflectance
elif v.attrs["name"] in ["refl_0_65um_nom", "C03"]:
assert isinstance(v.attrs["valid_range"], list)
assert v.dtype == np.float32
assert v.dtype == np.float64
assert "_FillValue" not in v.attrs.keys()
assert (v.attrs["file_key"] == "refl_0_65um_nom")
else:
assert (datasets['variable3'].attrs.get('flag_meanings')) is not None
assert (datasets['variable3'].attrs.get('flag_meanings') == '<flag_meanings_unknown>')
Expand Down

0 comments on commit c6d0122

Please sign in to comment.