From 04c34d58da528012393bd0819a789ca53d9fa3ff Mon Sep 17 00:00:00 2001 From: Trygve Aspenes Date: Mon, 18 Jun 2018 09:04:58 +0200 Subject: [PATCH 01/24] Feature add reading of SAR OCN wind product. Not working --- satpy/etc/readers/safe_sar_l2_ocn.yaml | 36 +++++++++ satpy/readers/safe_sar_l2_ocn.py | 101 +++++++++++++++++++++++++ 2 files changed, 137 insertions(+) create mode 100644 satpy/etc/readers/safe_sar_l2_ocn.yaml create mode 100644 satpy/readers/safe_sar_l2_ocn.py diff --git a/satpy/etc/readers/safe_sar_l2_ocn.yaml b/satpy/etc/readers/safe_sar_l2_ocn.yaml new file mode 100644 index 0000000000..4d163a17b2 --- /dev/null +++ b/satpy/etc/readers/safe_sar_l2_ocn.yaml @@ -0,0 +1,36 @@ +reader: + description: SAFE Reader for SAR L2 OCN data + name: safe_sar_l2_ocn + sensors: [sar-c] + default_channels: [] + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + +file_types: + safe_measurement: + file_reader: !!python/name:satpy.readers.safe_sar_l2_ocn.SAFENC + file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/measurement/{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.nc'] + +datasets: + owiLat: + name: owiLat + file_type: safe_measurement + standard_name: latitude + units: degree + + owiLon: + name: owiLon + file_type: safe_measurement + standard_name: longitude + units: degree + + owiWindSpeed: + name: owiWindSpeed + sensor: sar-c + wavelength: [5.400, 5.405, 5.410] + file_type: safe_measurement + + owiWindDirection: + name: owiWindDirection + sensor: sar-c + wavelength: [5.400, 5.405, 5.410] + file_type: safe_measurement diff --git a/satpy/readers/safe_sar_l2_ocn.py b/satpy/readers/safe_sar_l2_ocn.py new file mode 100644 index 0000000000..e5bedf59a0 --- /dev/null +++ b/satpy/readers/safe_sar_l2_ocn.py @@ -0,0 +1,101 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2018 Trygve Aspenes + +# Author(s): + +# Trygve Aspenes + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
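
One way to sanity-check the file_patterns entry above is to run it through trollsift, the pattern library satpy's YAML reader uses for filename matching. The snippet below is only an illustration: the pattern string is copied verbatim from the YAML, while the path is an invented example that simply follows the Sentinel-1 L2 OCN naming convention.

    from trollsift import Parser

    # Pattern copied verbatim from safe_sar_l2_ocn.yaml above; the path further
    # down is a made-up example following the Sentinel-1 L2 OCN naming scheme.
    pattern = ('{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_'
               '{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_'
               '{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_'
               '{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}'
               '.SAFE/measurement/'
               '{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-'
               '{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-'
               '{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.nc')

    path = ('S1A_IW_OCN__2SDV_20180617T173237_20180617T173302_'
            '022411_026D75_B47E.SAFE/measurement/'
            's1a-iw-ocn-vv-20180617t173237-20180617t173302-022411-026d75-001.nc')

    info = Parser(pattern).parse(path)
    print(sorted(info))              # every field from both path components
    print(info['fstart_time'])       # %-style fields come back as datetime objects
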
+"""SAFE SAR L2 OCN format.""" + +import logging +import os + +from satpy.readers.file_handlers import BaseFileHandler +from satpy import CHUNK_SIZE + +import xarray as xr + +logger = logging.getLogger(__name__) + +class SAFENC(BaseFileHandler): + """Measurement file reader.""" + + def __init__(self, filename, filename_info, filetype_info): + print "INIT SAFENC" + super(SAFENC, self).__init__(filename, filename_info, + filetype_info) + + self._start_time = filename_info['start_time'] + self._end_time = filename_info['end_time'] + + self._polarization = filename_info['polarization'] + + self.lats = None + self.lons = None + self._shape = None + self.area = None + + self.nc = xr.open_dataset(filename, + decode_cf=True, + mask_and_scale=False, + chunks={'owiAzSize': CHUNK_SIZE, + 'owiRaSize': CHUNK_SIZE}) + print self.nc + print self.nc['owiWindDirection'] + self.filename = filename + print "END INIT" + #self.get_gdal_filehandle() + + def get_dataset(self, key, info): + """Load a dataset.""" + logger.debug("REader %s %s",key, info) + #if self._polarization != key.polarization: + # return + + logger.debug('Reading keyname %s.', key.name) + if key.name in ['owiLat', 'owiLon']: + logger.debug('Constructing coordinate arrays ll.') + + if self.lons is None or self.lats is None: + self.lons = self.nc['owiLon'] + self.lats = self.nc['owiLat'] + + if key.name == 'owiLat': + res = self.lats + else: + res = self.lons + res.attrs = info + else: + logger.debug("Read data") + res = self.nc[key.name] + res.attrs.update({'platform_name': self.nc.attrs['missionName'], + 'sensor': 'tullball'}) + + res.attrs.update(info) + + if not self._shape: + self._shape = res.shape + + return res + + @property + def start_time(self): + return self._start_time + + @property + def end_time(self): + return self._end_time From d11a5eaf5ba665f6ea167c16caefbba61a666b77 Mon Sep 17 00:00:00 2001 From: Trygve Aspenes Date: Mon, 18 Jun 2018 22:16:17 +0200 Subject: [PATCH 02/24] Some more test --- satpy/readers/safe_sar_l2_ocn.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/satpy/readers/safe_sar_l2_ocn.py b/satpy/readers/safe_sar_l2_ocn.py index e5bedf59a0..f9fb78b800 100644 --- a/satpy/readers/safe_sar_l2_ocn.py +++ b/satpy/readers/safe_sar_l2_ocn.py @@ -27,6 +27,7 @@ from satpy.readers.file_handlers import BaseFileHandler from satpy import CHUNK_SIZE +import numpy as np import xarray as xr logger = logging.getLogger(__name__) @@ -82,11 +83,14 @@ def get_dataset(self, key, info): else: logger.debug("Read data") res = self.nc[key.name] - res.attrs.update({'platform_name': self.nc.attrs['missionName'], - 'sensor': 'tullball'}) - res.attrs.update(info) - + if '_FillValue' in res.attrs: + res = res.where(res != res.attrs['_FillValue']) + res.attrs['_FillValue'] = np.nan + + + print "DATA:", self.nc[key.name] + print "END" if not self._shape: self._shape = res.shape From b146c892bdc9ff05f570d56307a322516571fdbd Mon Sep 17 00:00:00 2001 From: Trygve Aspenes Date: Sat, 23 Jun 2018 08:37:42 +0200 Subject: [PATCH 03/24] try readers with mitiff --- satpy/readers/safe_sar_l2_ocn.py | 5 +++++ satpy/writers/mitiff.py | 6 ++++++ 2 files changed, 11 insertions(+) diff --git a/satpy/readers/safe_sar_l2_ocn.py b/satpy/readers/safe_sar_l2_ocn.py index f9fb78b800..c7f0d791b8 100644 --- a/satpy/readers/safe_sar_l2_ocn.py +++ b/satpy/readers/safe_sar_l2_ocn.py @@ -91,6 +91,11 @@ def get_dataset(self, key, info): print "DATA:", self.nc[key.name] print "END" + + #print self.nc.attrs + if 'missionName' in self.nc.attrs: 
+ res.attrs.update({'platform_name': self.nc.attrs['missionName']}) + if not self._shape: self._shape = res.shape diff --git a/satpy/writers/mitiff.py b/satpy/writers/mitiff.py index c4a43af69c..4229293641 100644 --- a/satpy/writers/mitiff.py +++ b/satpy/writers/mitiff.py @@ -26,6 +26,7 @@ import logging import numpy as np +import xarray as xr from satpy.writers import ImageWriter @@ -533,9 +534,14 @@ def _make_image_description(self, datasets, **kwargs): _image_description += ' Channels: ' + print "dataset: ", datasets + print "type dataset: ", type(datasets) if isinstance(datasets, list): LOG.debug("len datasets: %s", len(datasets)) _image_description += str(len(datasets)) + elif isinstance(datasets, xr.core.dataarray.DataArray): + LOG.debug("len datasets: xarray 1", ) + _image_description += str(1) else: LOG.debug("len datasets: %s", datasets.sizes['bands']) _image_description += str(datasets.sizes['bands']) From a2e123c8bcf06e88cc35b8073eb55960553f5b62 Mon Sep 17 00:00:00 2001 From: Trygve Aspenes Date: Tue, 27 Nov 2018 09:39:12 +0100 Subject: [PATCH 04/24] On the way to a working reader. Not working yet --- satpy/etc/readers/safe_sar_l2_ocn.yaml | 4 + satpy/readers/safe_sar_l2_ocn.py | 102 ++++++++++++++++++++++++- satpy/writers/mitiff.py | 4 + 3 files changed, 108 insertions(+), 2 deletions(-) diff --git a/satpy/etc/readers/safe_sar_l2_ocn.yaml b/satpy/etc/readers/safe_sar_l2_ocn.yaml index 4d163a17b2..50782bf241 100644 --- a/satpy/etc/readers/safe_sar_l2_ocn.yaml +++ b/satpy/etc/readers/safe_sar_l2_ocn.yaml @@ -9,6 +9,10 @@ file_types: safe_measurement: file_reader: !!python/name:satpy.readers.safe_sar_l2_ocn.SAFENC file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/measurement/{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.nc'] + requires: [safe_manifest] + safe_manifest: + file_reader: !!python/name:satpy.readers.safe_sar_l2_ocn.SAFEXML + file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/manifest.safe'] datasets: owiLat: diff --git a/satpy/readers/safe_sar_l2_ocn.py b/satpy/readers/safe_sar_l2_ocn.py index c7f0d791b8..ed2c1b3ada 100644 --- a/satpy/readers/safe_sar_l2_ocn.py +++ b/satpy/readers/safe_sar_l2_ocn.py @@ -23,6 +23,7 @@ import logging import os +import xml.etree.ElementTree as ET from satpy.readers.file_handlers import BaseFileHandler from satpy import CHUNK_SIZE @@ -32,14 +33,82 @@ logger = logging.getLogger(__name__) + +def dictify(r, root=True): + """Convert an ElementTree into a dict.""" + if root: + return {r.tag: dictify(r, False)} + d = {} + if r.text and r.text.strip(): + try: + return int(r.text) + except ValueError: + try: + return float(r.text) + except ValueError: + return r.text + for x in r.findall("./*"): + print x, x.tag + if x.tag in d and not isinstance(d[x.tag], list): + d[x.tag] = [d[x.tag]] + d[x.tag].append(dictify(x, False)) + else: + d[x.tag] = dictify(x, False) + return d + + +class SAFEXML(BaseFileHandler): + """XML file reader for the SAFE format.""" + + def __init__(self, 
filename, filename_info, filetype_info): + print 'SAFEXML init' + super(SAFEXML, self).__init__(filename, filename_info, filetype_info) + + #self._start_time = filename_info['fstart_time'] + #self._end_time = filename_info['fend_time'] + #self._polarization = filename_info['fpolarization'] + self.root = ET.parse(self.filename) + #rt = self.root.getroot() + #for coordinates in rt.findall('gml:coordinates'): + # print coordinates + #print 'After coordinates' + #print dictify(self.root.getroot()) + #self.hdr = {} + #if header_file is not None: + # self.hdr = header_file.get_metadata() + # print 'self.hdr', self.hdr + print "SAFEXML END INIT" + + def get_metadata(self): + """Convert the xml metadata to dict.""" + print "get_metadata" + return dictify(self.root.getroot()) + + def get_dataset(self, key, info): + print "get_dataset XML" + return + + # @property +# def start_time(self): +# return self._start_time + +# @property +# def end_time(self): +# return self._end_time + + class SAFENC(BaseFileHandler): """Measurement file reader.""" - def __init__(self, filename, filename_info, filetype_info): + def __init__(self, filename, filename_info, filetype_info, manifest_fh): print "INIT SAFENC" super(SAFENC, self).__init__(filename, filename_info, filetype_info) + self.mainfest = manifest_fh + print "manifest_fh ", manifest_fh + self.manifest.get_metadata() + self._start_time = filename_info['start_time'] self._end_time = filename_info['end_time'] @@ -49,12 +118,14 @@ def __init__(self, filename, filename_info, filetype_info): self.lons = None self._shape = None self.area = None - + self.nc = xr.open_dataset(filename, decode_cf=True, mask_and_scale=False, chunks={'owiAzSize': CHUNK_SIZE, 'owiRaSize': CHUNK_SIZE}) + self.nc = self.nc.rename({'owiAzSize': 'x'}) + self.nc = self.nc.rename({'owiRaSize': 'y'}) print self.nc print self.nc['owiWindDirection'] self.filename = filename @@ -83,6 +154,7 @@ def get_dataset(self, key, info): else: logger.debug("Read data") res = self.nc[key.name] + res = xr.DataArray(res, dims=['y', 'x']) res.attrs.update(info) if '_FillValue' in res.attrs: res = res.where(res != res.attrs['_FillValue']) @@ -96,6 +168,7 @@ def get_dataset(self, key, info): if 'missionName' in self.nc.attrs: res.attrs.update({'platform_name': self.nc.attrs['missionName']}) + print "res.shape: ",res.shape if not self._shape: self._shape = res.shape @@ -108,3 +181,28 @@ def start_time(self): @property def end_time(self): return self._end_time + + def get_area_def(self, ds_id): + data = self[ds_id.name] + + proj_dict = { + 'proj': 'latlong', + 'datum': 'WGS84', + 'ellps': 'WGS84', + 'no_defs': True + } + + area_extent = [data.attrs.get('ProjectionMinLongitude'), data.attrs.get('ProjectionMinLatitude'), + data.attrs.get('ProjectionMaxLongitude'), data.attrs.get('ProjectionMaxLatitude')] + + area = geometry.AreaDefinition( + 'sar_ocn_area', + 'name_of_proj', + 'id_of_proj', + proj_dict, + int(self.filename_info['dim0']), + int(self.filename_info['dim1']), + np.asarray(area_extent) + ) + + return area diff --git a/satpy/writers/mitiff.py b/satpy/writers/mitiff.py index 557f0a14b3..a901345b48 100644 --- a/satpy/writers/mitiff.py +++ b/satpy/writers/mitiff.py @@ -197,9 +197,13 @@ def _channel_names(self, channels, cns, **kwargs): return _image_description def _add_sizes(self, datasets, first_dataset): + print "datasets.sizes: ", datasets.sizes + print type(datasets.sizes) _image_description = ' Xsize: ' if isinstance(datasets, list): _image_description += str(first_dataset.sizes['x']) + '\n' + elif 
isinstance(datasets, dict): + print "DICT?" else: _image_description += str(datasets.sizes['x']) + '\n' From c3e5fad66d09588d2e6f4015c90a09e7f674ca05 Mon Sep 17 00:00:00 2001 From: Trygve Aspenes Date: Tue, 27 Nov 2018 10:12:50 +0100 Subject: [PATCH 05/24] Test --- satpy/etc/readers/nc_goes.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/etc/readers/nc_goes.yaml b/satpy/etc/readers/nc_goes.yaml index 1df805decc..bf36697ec2 100644 --- a/satpy/etc/readers/nc_goes.yaml +++ b/satpy/etc/readers/nc_goes.yaml @@ -13,7 +13,8 @@ reader: file_types: nc_goes_00_7: file_reader: !!python/name:satpy.readers.nc_goes.GOESNCFileHandler - file_patterns: ['{platform}.{start_time:%Y.%j.%H%M%S}.BAND_01.nc'] + file_patterns: ['{platform}.{start_time:%Y.%j.%H%M%S}.BAND_01.nc', + 'goes15.{start_time:%Y.%j.%H%M%S}_1.nc'] nc_goes_03_9: file_reader: !!python/name:satpy.readers.nc_goes.GOESNCFileHandler file_patterns: ['{platform}.{start_time:%Y.%j.%H%M%S}.BAND_02.nc'] From f2d1634b88ee7f0108b7d60d3f245bb3671eec73 Mon Sep 17 00:00:00 2001 From: Trygve Aspenes Date: Thu, 29 Nov 2018 12:50:01 +0100 Subject: [PATCH 06/24] First attempt to include reading the Eumetsat formated GOES15 CLASS netcdf data --- satpy/etc/composites/goes_imager.yaml | 11 +- satpy/etc/readers/nc_goes.yaml | 63 +++-- satpy/readers/nc_goes.py | 337 ++++++++++++++++++++++++++ 3 files changed, 392 insertions(+), 19 deletions(-) diff --git a/satpy/etc/composites/goes_imager.yaml b/satpy/etc/composites/goes_imager.yaml index f4cce65279..bfb81edd96 100644 --- a/satpy/etc/composites/goes_imager.yaml +++ b/satpy/etc/composites/goes_imager.yaml @@ -1,2 +1,11 @@ # XXX arb -sensor_name: visir/goes_imager \ No newline at end of file +sensor_name: visir/goes_imager + +composites: + overview: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - '00_7' + - '00_7' + - '10_7' + standard_name: overview diff --git a/satpy/etc/readers/nc_goes.yaml b/satpy/etc/readers/nc_goes.yaml index bf36697ec2..cf0852811c 100644 --- a/satpy/etc/readers/nc_goes.yaml +++ b/satpy/etc/readers/nc_goes.yaml @@ -13,17 +13,29 @@ reader: file_types: nc_goes_00_7: file_reader: !!python/name:satpy.readers.nc_goes.GOESNCFileHandler - file_patterns: ['{platform}.{start_time:%Y.%j.%H%M%S}.BAND_01.nc', - 'goes15.{start_time:%Y.%j.%H%M%S}_1.nc'] + file_patterns: ['{platform}.{start_time:%Y.%j.%H%M%S}.BAND_01.nc'] + eum_nc_goes_00_7: + file_reader: !!python/name:satpy.readers.nc_goes.GOESEUMNCFileHandler + file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_1.nc'] + requires: [eum_nc_goes_geo] + nc_goes_03_9: file_reader: !!python/name:satpy.readers.nc_goes.GOESNCFileHandler file_patterns: ['{platform}.{start_time:%Y.%j.%H%M%S}.BAND_02.nc'] + eum_nc_goes_03_9: + file_reader: !!python/name:satpy.readers.nc_goes.GOESEUMNCFileHandler + file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_2.nc'] + requires: [eum_nc_goes_geo] nc_goes_06_5: file_reader: !!python/name:satpy.readers.nc_goes.GOESNCFileHandler file_patterns: ['goes12.{start_time:%Y.%j.%H%M%S}.BAND_03.nc', 'goes13.{start_time:%Y.%j.%H%M%S}.BAND_03.nc', 'goes14.{start_time:%Y.%j.%H%M%S}.BAND_03.nc', 'goes15.{start_time:%Y.%j.%H%M%S}.BAND_03.nc'] + eum_nc_goes_06_5: + file_reader: !!python/name:satpy.readers.nc_goes.GOESEUMNCFileHandler + file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_3.nc'] + requires: [eum_nc_goes_geo] nc_goes_06_8: file_reader: !!python/name:satpy.readers.nc_goes.GOESNCFileHandler file_patterns: ['goes08.{start_time:%Y.%j.%H%M%S}.BAND_03.nc', @@ -33,18 +45,33 @@ 
file_types: nc_goes_10_7: file_reader: !!python/name:satpy.readers.nc_goes.GOESNCFileHandler file_patterns: ['{platform}.{start_time:%Y.%j.%H%M%S}.BAND_04.nc'] + eum_nc_goes_10_7: + file_reader: !!python/name:satpy.readers.nc_goes.GOESEUMNCFileHandler + file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_4.nc'] + requires: [eum_nc_goes_geo] nc_goes_12_0: file_reader: !!python/name:satpy.readers.nc_goes.GOESNCFileHandler file_patterns: ['goes08.{start_time:%Y.%j.%H%M%S}.BAND_05.nc', 'goes09.{start_time:%Y.%j.%H%M%S}.BAND_05.nc', 'goes10.{start_time:%Y.%j.%H%M%S}.BAND_05.nc', 'goes11.{start_time:%Y.%j.%H%M%S}.BAND_05.nc'] + eum_nc_goes_12_0: + file_reader: !!python/name:satpy.readers.nc_goes.GOESEUMNCFileHandler + file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_5.nc'] + requires: [eum_nc_goes_geo] nc_goes_13_3: file_reader: !!python/name:satpy.readers.nc_goes.GOESNCFileHandler file_patterns: ['goes12.{start_time:%Y.%j.%H%M%S}.BAND_06.nc', 'goes13.{start_time:%Y.%j.%H%M%S}.BAND_06.nc', 'goes14.{start_time:%Y.%j.%H%M%S}.BAND_06.nc', 'goes15.{start_time:%Y.%j.%H%M%S}.BAND_06.nc'] + eum_nc_goes_13_3: + file_reader: !!python/name:satpy.readers.nc_goes.GOESEUMNCFileHandler + file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_6.nc'] + requires: [eum_nc_goes_geo] + eum_nc_goes_geo: + file_reader: !!python/name:satpy.readers.nc_goes.GOESEUMGEONCFileHandler + file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_geo.nc'] datasets: '00_7': @@ -58,12 +85,12 @@ datasets: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: - comment: Angle of incident radiation and annual variation of the earth-sun distance is not taken into account. A value of 100% corresponds to the radiance of a perfectly reflecting diffuse surface illuminated at normal incidence when the sun is at its annual-average distance from the Earth. + #comment: Angle of incident radiation and annual variation of the earth-sun distance is not taken into account. A value of 100% corresponds to the radiance of a perfectly reflecting diffuse surface illuminated at normal incidence when the sun is at its annual-average distance from the Earth. 
units: "%" coordinates: - longitude_00_7 - latitude_00_7 - file_type: nc_goes_00_7 + file_type: [nc_goes_00_7, eum_nc_goes_00_7] '03_9': name: '03_9' @@ -81,7 +108,7 @@ datasets: coordinates: - longitude_03_9 - latitude_03_9 - file_type: nc_goes_03_9 + file_type: [nc_goes_03_9, eum_nc_goes_03_9] '06_5': name: '06_5' @@ -99,7 +126,7 @@ datasets: coordinates: - longitude_06_5 - latitude_06_5 - file_type: nc_goes_06_5 + file_type: [nc_goes_06_5, eum_nc_goes_06_5] '06_8': name: '06_8' @@ -135,7 +162,7 @@ datasets: coordinates: - longitude_10_7 - latitude_10_7 - file_type: nc_goes_10_7 + file_type: [nc_goes_10_7, eum_nc_goes_10_7] '12_0': name: '12_0' @@ -171,41 +198,41 @@ datasets: coordinates: - longitude_13_3 - latitude_13_3 - file_type: nc_goes_13_3 + file_type: [nc_goes_13_3, eum_nc_goes_13_3] longitude_00_7: name: longitude_00_7 - file_type: nc_goes_00_7 + file_type: [nc_goes_00_7, eum_nc_goes_geo] standard_name: longitude units: degrees_east latitude_00_7: name: latitude_00_7 - file_type: nc_goes_00_7 + file_type: [nc_goes_00_7, eum_nc_goes_geo] standard_name: latitude units: degrees_north longitude_03_9: name: longitude_03_9 - file_type: nc_goes_03_9 + file_type: [nc_goes_03_9, eum_nc_goes_geo] standard_name: longitude units: degrees_east latitude_03_9: name: latitude_03_9 - file_type: nc_goes_03_9 + file_type: [nc_goes_03_9, eum_nc_goes_geo] standard_name: latitude units: degrees_north longitude_06_5: name: longitude_06_5 - file_type: nc_goes_06_5 + file_type: [nc_goes_06_5, eum_nc_goes_geo] standard_name: longitude units: degrees_east latitude_06_5: name: latitude_06_5 - file_type: nc_goes_06_5 + file_type: [nc_goes_06_5, eum_nc_goes_geo] standard_name: latitude units: degrees_north @@ -223,13 +250,13 @@ datasets: longitude_10_7: name: longitude_10_7 - file_type: nc_goes_10_7 + file_type: [nc_goes_10_7, eum_nc_goes_geo] standard_name: longitude units: degrees_east latitude_10_7: name: latitude_10_7 - file_type: nc_goes_10_7 + file_type: [nc_goes_10_7, eum_nc_goes_geo] standard_name: latitude units: degrees_north @@ -247,12 +274,12 @@ datasets: longitude_13_3: name: longitude_13_3 - file_type: nc_goes_13_3 + file_type: [nc_goes_13_3, eum_nc_goes_geo] standard_name: longitude units: degrees_east latitude_13_3: name: latitude_13_3 - file_type: nc_goes_13_3 + file_type: [nc_goes_13_3, eum_nc_goes_geo] standard_name: latitude units: degrees_north diff --git a/satpy/readers/nc_goes.py b/satpy/readers/nc_goes.py index 4ed1d02f81..0818bb930a 100644 --- a/satpy/readers/nc_goes.py +++ b/satpy/readers/nc_goes.py @@ -1082,6 +1082,343 @@ def _float(self, string): return fac * float(string) +class GOESEUMNCFileHandler(BaseFileHandler): + """File handler for GOES Imager data in EUM netCDF format""" + def __init__(self, filename, filename_info, filetype_info, geo_data): + """Initialize the reader.""" + super(GOESEUMNCFileHandler, self).__init__(filename, filename_info, + filetype_info) + print('INIT EUM') + + self.nc = xr.open_dataset(self.filename, + decode_cf=True, + mask_and_scale=False, + chunks={'xc': CHUNK_SIZE, 'yc': CHUNK_SIZE}) + self.sensor = 'goes_imager' + self.nlines = self.nc.dims['yc'] + self.ncols = self.nc.dims['xc'] + self.platform_name = self._get_platform_name( + self.nc.attrs['Satellite Sensor']) + self.platform_shortname = self.platform_name.replace('-', '').lower() + self.gvar_channel = int(self.nc['bands'].values) + self.sector = self._get_sector(channel=self.gvar_channel, + nlines=self.nlines, + ncols=self.ncols) + self._meta = None + + @staticmethod + def 
_get_platform_name(ncattr): + """Determine name of the platform""" + match = re.match(r'G-(\d+)', ncattr) + if match: + return SPACECRAFTS.get(int(match.groups()[0])) + + return None + + def _get_sector(self, channel, nlines, ncols): + """Determine which sector was scanned""" + if self._is_vis(channel): + margin = 100 + sectors_ref = VIS_SECTORS + else: + margin = 50 + sectors_ref = IR_SECTORS + + for (nlines_ref, ncols_ref), sector in sectors_ref.items(): + if np.fabs(ncols - ncols_ref) < margin and \ + np.fabs(nlines - nlines_ref) < margin: + return sector + + return UNKNOWN_SECTOR + + @staticmethod + def _is_vis(channel): + """Determine whether the given channel is a visible channel""" + if isinstance(channel, str): + return channel == '00_7' + elif isinstance(channel, int): + return channel == 1 + else: + raise ValueError('Invalid channel') + + @property + def meta(self): + """Derive metadata from the coordinates""" + # Use buffered data if available + return self._meta + + def get_dataset(self, key, info): + """Load dataset designated by the given key from file""" + logger.debug('Reading dataset {}'.format(key.name)) + + print "data select:", self.nc['data'].isel(time=0) + print key.calibration + print key.name + tic = datetime.now() + data = self.calibrate(self.nc['data'].isel(time=0), + calibration=key.calibration, + channel=key.name) + logger.debug('Calibration time: {}'.format(datetime.now() - tic)) + + # Mask space pixels + #data = data.where(self.meta['earth_mask']) + + # Set proper dimension names + data = data.rename({'xc': 'x', 'yc': 'y'}) + print 'data before drop', data + data = data.drop('time') + print 'data after drop', data + + # Update metadata + data.attrs.update(info) + data.attrs.update( + {'platform_name': self.platform_name, + 'sensor': self.sensor} + ) + print data + return data + + def calibrate(self, counts, calibration, channel): + """Perform calibration""" + # Convert 16bit counts from netCDF4 file to the original 10bit + # GVAR counts by dividing by 32. See [FAQ]. + #counts = counts / 32. + + print self.platform_name, channel + coefs = CALIB_COEFS[self.platform_name][channel] + print('calibration: ', calibration) + if calibration == 'counts': + return counts + elif calibration in ['radiance', 'reflectance', + 'brightness_temperature']: + #radiance = self._counts2radiance(counts=counts, coefs=coefs, + #channel=channel) + radiance = counts + if calibration == 'radiance': + return radiance + + return self._calibrate(radiance=radiance, coefs=coefs, + channel=channel, calibration=calibration) + else: + raise ValueError('Unsupported calibration for channel {}: {}' + .format(channel, calibration)) + + def _counts2radiance(self, counts, coefs, channel): + """Convert raw detector counts to radiance""" + logger.debug('Converting counts to radiance') + + if self._is_vis(channel): + # Since the scanline-detector assignment is unknown, use the average + # coefficients for all scanlines. 
+ slope = np.array(coefs['slope']).mean() + offset = np.array(coefs['offset']).mean() + return self._viscounts2radiance(counts=counts, slope=slope, + offset=offset) + + return self._ircounts2radiance(counts=counts, scale=coefs['scale'], + offset=coefs['offset']) + + def _calibrate(self, radiance, coefs, channel, calibration): + """Convert radiance to reflectance or brightness temperature""" + if self._is_vis(channel): + if not calibration == 'reflectance': + raise ValueError('Cannot calibrate VIS channel to ' + '{}'.format(calibration)) + return self._calibrate_vis(radiance=radiance, k=coefs['k']) + else: + if not calibration == 'brightness_temperature': + raise ValueError('Cannot calibrate IR channel to ' + '{}'.format(calibration)) + + # Since the scanline-detector assignment is unknown, use the average + # coefficients for all scanlines. + mean_coefs = {'a': np.array(coefs['a']).mean(), + 'b': np.array(coefs['b']).mean(), + 'n': np.array(coefs['n']).mean(), + 'btmin': coefs['btmin'], + 'btmax': coefs['btmax']} + return self._calibrate_ir(radiance=radiance, coefs=mean_coefs) + + @staticmethod + def _ircounts2radiance(counts, scale, offset): + """Convert IR counts to radiance + + Reference: [IR]. + + Args: + counts: Raw detector counts + scale: Scale [mW-1 m2 cm sr] + offset: Offset [1] + + Returns: + Radiance [mW m-2 cm-1 sr-1] + """ + rad = (counts - offset) / scale + return counts.clip(min=0) + + @staticmethod + def _calibrate_ir(radiance, coefs): + """Convert IR radiance to brightness temperature + + Reference: [IR] + + Args: + radiance: Radiance [mW m-2 cm-1 sr-1] + coefs: Dictionary of calibration coefficients. Keys: + n: The channel's central wavenumber [cm-1] + a: Offset [K] + b: Slope [1] + btmin: Minimum brightness temperature threshold [K] + btmax: Maximum brightness temperature threshold [K] + + Returns: + Brightness temperature [K] + """ + logger.debug('Calibrating to brightness temperature') + + # Compute brightness temperature using inverse Planck formula + n = coefs['n'] + bteff = C2 * n / xu.log(1 + C1 * n**3 / radiance.where(radiance > 0)) + bt = xr.DataArray(bteff * coefs['b'] + coefs['a']) + + # Apply BT threshold + return bt.where(xu.logical_and(bt >= coefs['btmin'], + bt <= coefs['btmax'])) + + @staticmethod + def _viscounts2radiance(counts, slope, offset): + """Convert VIS counts to radiance + + References: [VIS] + + Args: + counts: Raw detector counts + slope: Slope [W m-2 um-1 sr-1] + offset: Offset [W m-2 um-1 sr-1] + Returns: + Radiance [W m-2 um-1 sr-1] + """ + #rad = counts * slope + offset + return counts.clip(min=0) + + @staticmethod + def _calibrate_vis(radiance, k): + """Convert VIS radiance to reflectance + + Note: Angle of incident radiation and annual variation of the + earth-sun distance is not taken into account. A value of 100% + corresponds to the radiance of a perfectly reflecting diffuse surface + illuminated at normal incidence when the sun is at its annual-average + distance from the Earth. + + TODO: Take angle of incident radiation (cos sza) and annual variation + of the earth-sun distance into account. + + Reference: [VIS] + + Args: + radiance: Radiance [mW m-2 cm-1 sr-1] + k: pi / H, where H is the solar spectral irradiance at + annual-average sun-earth distance, averaged over the spectral + response function of the detector). 
Units of k: [m2 um sr W-1] + Returns: + Reflectance [%] + """ + logger.debug('Calibrating to reflectance') + #refl = 100 * k * radiance + #refl = k + return radiance.clip(min=0) + + def __del__(self): + try: + self.nc.close() + except (AttributeError, IOError, OSError): + pass + + +class GOESEUMGEONCFileHandler(BaseFileHandler): + """File handler for GOES Geolocation data in EUM netCDF format""" + def __init__(self, filename, filename_info, filetype_info): + """Initialize the reader.""" + super(GOESEUMGEONCFileHandler, self).__init__(filename, filename_info, + filetype_info) + print('INIT EUM GEO') + + self.nc = xr.open_dataset(self.filename, + decode_cf=True, + mask_and_scale=False, + chunks={'xc': CHUNK_SIZE, 'yc': CHUNK_SIZE}) + self.sensor = 'goes_imager' + self.nlines = self.nc.dims['yc'] + self.ncols = self.nc.dims['xc'] + self.platform_name = self._get_platform_name( + self.nc.attrs['Satellite Sensor']) + self.platform_shortname = self.platform_name.replace('-', '').lower() + #self.gvar_channel = "1" # int(self.nc['bands'].values) + #self.sector = self._get_sector(channel=self.gvar_channel, + # nlines=self.nlines, + # ncols=self.ncols) + self._meta = None + + def get_dataset(self, key, info): + """Load dataset designated by the given key from file""" + logger.debug('Reading dataset {}'.format(key.name)) + + # Read data from file and calibrate if necessary + if 'longitude' in key.name: + data = self.nc['lon'] + elif 'latitude' in key.name: + data = self.nc['lat'] + else: + logger.debug("Unknown key.name: ", key.name) + + print "LONLAT data: ", data + # Set proper dimension names + data = data.rename({'xc': 'x', 'yc': 'y'}) + # Update metadata + data.attrs.update(info) + #data.attrs.update( + # {'platform_name': self.platform_name, + # 'sensor': self.sensor, + # 'sector': self.sector} + #) + return data + + @staticmethod + def _get_platform_name(ncattr): + """Determine name of the platform""" + match = re.match(r'G-(\d+)', ncattr) + if match: + return SPACECRAFTS.get(int(match.groups()[0])) + + return None + + def _get_sector(self, channel, nlines, ncols): + """Determine which sector was scanned""" + if self._is_vis(channel): + margin = 100 + sectors_ref = VIS_SECTORS + else: + margin = 50 + sectors_ref = IR_SECTORS + + for (nlines_ref, ncols_ref), sector in sectors_ref.items(): + if np.fabs(ncols - ncols_ref) < margin and \ + np.fabs(nlines - nlines_ref) < margin: + return sector + + return UNKNOWN_SECTOR + + @staticmethod + def _is_vis(channel): + """Determine whether the given channel is a visible channel""" + if isinstance(channel, str): + return channel == '00_7' + elif isinstance(channel, int): + return channel == 1 + else: + raise ValueError('Invalid channel') + def test_coefs(ir_url, vis_url): """Test calibration coefficients against NOAA reference pages From 28ea77918dd500a3ae09ed3012ceb530f5ff0cbc Mon Sep 17 00:00:00 2001 From: Trygve Aspenes Date: Fri, 30 Nov 2018 15:02:25 +0100 Subject: [PATCH 07/24] All datasets to dataarray. 
Drop file_list as an attribute due to concat issues --- satpy/readers/nc_goes.py | 47 +++++++++++++++------------------------- 1 file changed, 18 insertions(+), 29 deletions(-) diff --git a/satpy/readers/nc_goes.py b/satpy/readers/nc_goes.py index 0818bb930a..ff4c132b7a 100644 --- a/satpy/readers/nc_goes.py +++ b/satpy/readers/nc_goes.py @@ -1088,8 +1088,6 @@ def __init__(self, filename, filename_info, filetype_info, geo_data): """Initialize the reader.""" super(GOESEUMNCFileHandler, self).__init__(filename, filename_info, filetype_info) - print('INIT EUM') - self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, @@ -1151,9 +1149,6 @@ def get_dataset(self, key, info): """Load dataset designated by the given key from file""" logger.debug('Reading dataset {}'.format(key.name)) - print "data select:", self.nc['data'].isel(time=0) - print key.calibration - print key.name tic = datetime.now() data = self.calibrate(self.nc['data'].isel(time=0), calibration=key.calibration, @@ -1165,34 +1160,37 @@ def get_dataset(self, key, info): # Set proper dimension names data = data.rename({'xc': 'x', 'yc': 'y'}) - print 'data before drop', data data = data.drop('time') - print 'data after drop', data - + + # If the file_type attribute is a list and the data is xarray + # the concat of the dataset will not work. As the file_type is + # not needed this will be popped here. + if 'file_type' in info: + info.pop('file_type') + # Update metadata data.attrs.update(info) data.attrs.update( {'platform_name': self.platform_name, 'sensor': self.sensor} ) - print data return data def calibrate(self, counts, calibration, channel): """Perform calibration""" # Convert 16bit counts from netCDF4 file to the original 10bit # GVAR counts by dividing by 32. See [FAQ]. - #counts = counts / 32. + # FIXME quick hack as EUM data already are Albedo or radiance + # counts = counts / 32. 
- print self.platform_name, channel coefs = CALIB_COEFS[self.platform_name][channel] - print('calibration: ', calibration) if calibration == 'counts': return counts elif calibration in ['radiance', 'reflectance', 'brightness_temperature']: - #radiance = self._counts2radiance(counts=counts, coefs=coefs, - #channel=channel) + # FIXME quick hack as the EUM data is already in Albedo + # radiance = self._counts2radiance(counts=counts, coefs=coefs, + # channel=channel) radiance = counts if calibration == 'radiance': return radiance @@ -1254,7 +1252,7 @@ def _ircounts2radiance(counts, scale, offset): Radiance [mW m-2 cm-1 sr-1] """ rad = (counts - offset) / scale - return counts.clip(min=0) + return rad.clip(min=0) @staticmethod def _calibrate_ir(radiance, coefs): @@ -1279,8 +1277,7 @@ def _calibrate_ir(radiance, coefs): # Compute brightness temperature using inverse Planck formula n = coefs['n'] bteff = C2 * n / xu.log(1 + C1 * n**3 / radiance.where(radiance > 0)) - bt = xr.DataArray(bteff * coefs['b'] + coefs['a']) - + bt = (bteff * coefs['b'] + coefs['a']) # Apply BT threshold return bt.where(xu.logical_and(bt >= coefs['btmin'], bt <= coefs['btmax'])) @@ -1325,6 +1322,8 @@ def _calibrate_vis(radiance, k): Reflectance [%] """ logger.debug('Calibrating to reflectance') + # FIXME: quick hack commented out here as VIS is aleady calivrated to + # Albdo #refl = 100 * k * radiance #refl = k return radiance.clip(min=0) @@ -1342,8 +1341,6 @@ def __init__(self, filename, filename_info, filetype_info): """Initialize the reader.""" super(GOESEUMGEONCFileHandler, self).__init__(filename, filename_info, filetype_info) - print('INIT EUM GEO') - self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, @@ -1354,10 +1351,6 @@ def __init__(self, filename, filename_info, filetype_info): self.platform_name = self._get_platform_name( self.nc.attrs['Satellite Sensor']) self.platform_shortname = self.platform_name.replace('-', '').lower() - #self.gvar_channel = "1" # int(self.nc['bands'].values) - #self.sector = self._get_sector(channel=self.gvar_channel, - # nlines=self.nlines, - # ncols=self.ncols) self._meta = None def get_dataset(self, key, info): @@ -1372,16 +1365,11 @@ def get_dataset(self, key, info): else: logger.debug("Unknown key.name: ", key.name) - print "LONLAT data: ", data # Set proper dimension names data = data.rename({'xc': 'x', 'yc': 'y'}) + # Update metadata data.attrs.update(info) - #data.attrs.update( - # {'platform_name': self.platform_name, - # 'sensor': self.sensor, - # 'sector': self.sector} - #) return data @staticmethod @@ -1419,6 +1407,7 @@ def _is_vis(channel): else: raise ValueError('Invalid channel') + def test_coefs(ir_url, vis_url): """Test calibration coefficients against NOAA reference pages From c9b42c94d392d8782d3dfc82fd0aefc31f0eea04 Mon Sep 17 00:00:00 2001 From: Trygve Aspenes Date: Fri, 30 Nov 2018 15:25:40 +0100 Subject: [PATCH 08/24] Reverted mixed up changes in mitiff writer and fixes the nc_goes.yaml --- satpy/etc/readers/nc_goes.yaml | 2 +- satpy/etc/readers/safe_sar_l2_ocn.yaml | 40 ----- satpy/readers/safe_sar_l2_ocn.py | 208 ------------------------- satpy/writers/mitiff.py | 10 -- 4 files changed, 1 insertion(+), 259 deletions(-) delete mode 100644 satpy/etc/readers/safe_sar_l2_ocn.yaml delete mode 100644 satpy/readers/safe_sar_l2_ocn.py diff --git a/satpy/etc/readers/nc_goes.yaml b/satpy/etc/readers/nc_goes.yaml index cf0852811c..bf5084b37e 100644 --- a/satpy/etc/readers/nc_goes.yaml +++ b/satpy/etc/readers/nc_goes.yaml @@ -85,7 +85,7 @@ 
datasets: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: - #comment: Angle of incident radiation and annual variation of the earth-sun distance is not taken into account. A value of 100% corresponds to the radiance of a perfectly reflecting diffuse surface illuminated at normal incidence when the sun is at its annual-average distance from the Earth. + comment: Angle of incident radiation and annual variation of the earth-sun distance is not taken into account. A value of 100% corresponds to the radiance of a perfectly reflecting diffuse surface illuminated at normal incidence when the sun is at its annual-average distance from the Earth. units: "%" coordinates: - longitude_00_7 diff --git a/satpy/etc/readers/safe_sar_l2_ocn.yaml b/satpy/etc/readers/safe_sar_l2_ocn.yaml deleted file mode 100644 index 50782bf241..0000000000 --- a/satpy/etc/readers/safe_sar_l2_ocn.yaml +++ /dev/null @@ -1,40 +0,0 @@ -reader: - description: SAFE Reader for SAR L2 OCN data - name: safe_sar_l2_ocn - sensors: [sar-c] - default_channels: [] - reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader - -file_types: - safe_measurement: - file_reader: !!python/name:satpy.readers.safe_sar_l2_ocn.SAFENC - file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/measurement/{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.nc'] - requires: [safe_manifest] - safe_manifest: - file_reader: !!python/name:satpy.readers.safe_sar_l2_ocn.SAFEXML - file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/manifest.safe'] - -datasets: - owiLat: - name: owiLat - file_type: safe_measurement - standard_name: latitude - units: degree - - owiLon: - name: owiLon - file_type: safe_measurement - standard_name: longitude - units: degree - - owiWindSpeed: - name: owiWindSpeed - sensor: sar-c - wavelength: [5.400, 5.405, 5.410] - file_type: safe_measurement - - owiWindDirection: - name: owiWindDirection - sensor: sar-c - wavelength: [5.400, 5.405, 5.410] - file_type: safe_measurement diff --git a/satpy/readers/safe_sar_l2_ocn.py b/satpy/readers/safe_sar_l2_ocn.py deleted file mode 100644 index ed2c1b3ada..0000000000 --- a/satpy/readers/safe_sar_l2_ocn.py +++ /dev/null @@ -1,208 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -# Copyright (c) 2018 Trygve Aspenes - -# Author(s): - -# Trygve Aspenes - -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. - -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. - -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . 
-"""SAFE SAR L2 OCN format.""" - -import logging -import os -import xml.etree.ElementTree as ET - -from satpy.readers.file_handlers import BaseFileHandler -from satpy import CHUNK_SIZE - -import numpy as np -import xarray as xr - -logger = logging.getLogger(__name__) - - -def dictify(r, root=True): - """Convert an ElementTree into a dict.""" - if root: - return {r.tag: dictify(r, False)} - d = {} - if r.text and r.text.strip(): - try: - return int(r.text) - except ValueError: - try: - return float(r.text) - except ValueError: - return r.text - for x in r.findall("./*"): - print x, x.tag - if x.tag in d and not isinstance(d[x.tag], list): - d[x.tag] = [d[x.tag]] - d[x.tag].append(dictify(x, False)) - else: - d[x.tag] = dictify(x, False) - return d - - -class SAFEXML(BaseFileHandler): - """XML file reader for the SAFE format.""" - - def __init__(self, filename, filename_info, filetype_info): - print 'SAFEXML init' - super(SAFEXML, self).__init__(filename, filename_info, filetype_info) - - #self._start_time = filename_info['fstart_time'] - #self._end_time = filename_info['fend_time'] - #self._polarization = filename_info['fpolarization'] - self.root = ET.parse(self.filename) - #rt = self.root.getroot() - #for coordinates in rt.findall('gml:coordinates'): - # print coordinates - #print 'After coordinates' - #print dictify(self.root.getroot()) - #self.hdr = {} - #if header_file is not None: - # self.hdr = header_file.get_metadata() - # print 'self.hdr', self.hdr - print "SAFEXML END INIT" - - def get_metadata(self): - """Convert the xml metadata to dict.""" - print "get_metadata" - return dictify(self.root.getroot()) - - def get_dataset(self, key, info): - print "get_dataset XML" - return - - # @property -# def start_time(self): -# return self._start_time - -# @property -# def end_time(self): -# return self._end_time - - -class SAFENC(BaseFileHandler): - """Measurement file reader.""" - - def __init__(self, filename, filename_info, filetype_info, manifest_fh): - print "INIT SAFENC" - super(SAFENC, self).__init__(filename, filename_info, - filetype_info) - - self.mainfest = manifest_fh - print "manifest_fh ", manifest_fh - self.manifest.get_metadata() - - self._start_time = filename_info['start_time'] - self._end_time = filename_info['end_time'] - - self._polarization = filename_info['polarization'] - - self.lats = None - self.lons = None - self._shape = None - self.area = None - - self.nc = xr.open_dataset(filename, - decode_cf=True, - mask_and_scale=False, - chunks={'owiAzSize': CHUNK_SIZE, - 'owiRaSize': CHUNK_SIZE}) - self.nc = self.nc.rename({'owiAzSize': 'x'}) - self.nc = self.nc.rename({'owiRaSize': 'y'}) - print self.nc - print self.nc['owiWindDirection'] - self.filename = filename - print "END INIT" - #self.get_gdal_filehandle() - - def get_dataset(self, key, info): - """Load a dataset.""" - logger.debug("REader %s %s",key, info) - #if self._polarization != key.polarization: - # return - - logger.debug('Reading keyname %s.', key.name) - if key.name in ['owiLat', 'owiLon']: - logger.debug('Constructing coordinate arrays ll.') - - if self.lons is None or self.lats is None: - self.lons = self.nc['owiLon'] - self.lats = self.nc['owiLat'] - - if key.name == 'owiLat': - res = self.lats - else: - res = self.lons - res.attrs = info - else: - logger.debug("Read data") - res = self.nc[key.name] - res = xr.DataArray(res, dims=['y', 'x']) - res.attrs.update(info) - if '_FillValue' in res.attrs: - res = res.where(res != res.attrs['_FillValue']) - res.attrs['_FillValue'] = np.nan - - - print "DATA:", 
self.nc[key.name] - print "END" - - #print self.nc.attrs - if 'missionName' in self.nc.attrs: - res.attrs.update({'platform_name': self.nc.attrs['missionName']}) - - print "res.shape: ",res.shape - if not self._shape: - self._shape = res.shape - - return res - - @property - def start_time(self): - return self._start_time - - @property - def end_time(self): - return self._end_time - - def get_area_def(self, ds_id): - data = self[ds_id.name] - - proj_dict = { - 'proj': 'latlong', - 'datum': 'WGS84', - 'ellps': 'WGS84', - 'no_defs': True - } - - area_extent = [data.attrs.get('ProjectionMinLongitude'), data.attrs.get('ProjectionMinLatitude'), - data.attrs.get('ProjectionMaxLongitude'), data.attrs.get('ProjectionMaxLatitude')] - - area = geometry.AreaDefinition( - 'sar_ocn_area', - 'name_of_proj', - 'id_of_proj', - proj_dict, - int(self.filename_info['dim0']), - int(self.filename_info['dim1']), - np.asarray(area_extent) - ) - - return area diff --git a/satpy/writers/mitiff.py b/satpy/writers/mitiff.py index a901345b48..02ee6e7c19 100644 --- a/satpy/writers/mitiff.py +++ b/satpy/writers/mitiff.py @@ -26,7 +26,6 @@ import logging import numpy as np -import xarray as xr from satpy.writers import ImageWriter @@ -197,13 +196,9 @@ def _channel_names(self, channels, cns, **kwargs): return _image_description def _add_sizes(self, datasets, first_dataset): - print "datasets.sizes: ", datasets.sizes - print type(datasets.sizes) _image_description = ' Xsize: ' if isinstance(datasets, list): _image_description += str(first_dataset.sizes['x']) + '\n' - elif isinstance(datasets, dict): - print "DICT?" else: _image_description += str(datasets.sizes['x']) + '\n' @@ -509,14 +504,9 @@ def _make_image_description(self, datasets, **kwargs): _image_description += ' Channels: ' - print "dataset: ", datasets - print "type dataset: ", type(datasets) if isinstance(datasets, list): LOG.debug("len datasets: %s", len(datasets)) _image_description += str(len(datasets)) - elif isinstance(datasets, xr.core.dataarray.DataArray): - LOG.debug("len datasets: xarray 1", ) - _image_description += str(1) elif 'bands' in datasets.sizes: LOG.debug("len datasets: %s", datasets.sizes['bands']) _image_description += str(datasets.sizes['bands']) From 7b6521c1393c171bce101be00f23ff7d308a32de Mon Sep 17 00:00:00 2001 From: Trygve Aspenes Date: Fri, 30 Nov 2018 15:29:38 +0100 Subject: [PATCH 09/24] Layout issues --- satpy/readers/nc_goes.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/satpy/readers/nc_goes.py b/satpy/readers/nc_goes.py index ff4c132b7a..c40f56e8e8 100644 --- a/satpy/readers/nc_goes.py +++ b/satpy/readers/nc_goes.py @@ -1156,7 +1156,7 @@ def get_dataset(self, key, info): logger.debug('Calibration time: {}'.format(datetime.now() - tic)) # Mask space pixels - #data = data.where(self.meta['earth_mask']) + # data = data.where(self.meta['earth_mask']) # Set proper dimension names data = data.rename({'xc': 'x', 'yc': 'y'}) @@ -1167,7 +1167,7 @@ def get_dataset(self, key, info): # not needed this will be popped here. 
if 'file_type' in info: info.pop('file_type') - + # Update metadata data.attrs.update(info) data.attrs.update( @@ -1295,7 +1295,7 @@ def _viscounts2radiance(counts, slope, offset): Returns: Radiance [W m-2 um-1 sr-1] """ - #rad = counts * slope + offset + # rad = counts * slope + offset return counts.clip(min=0) @staticmethod @@ -1324,8 +1324,8 @@ def _calibrate_vis(radiance, k): logger.debug('Calibrating to reflectance') # FIXME: quick hack commented out here as VIS is aleady calivrated to # Albdo - #refl = 100 * k * radiance - #refl = k + # refl = 100 * k * radiance + # refl = k return radiance.clip(min=0) def __del__(self): From a2538a03de219b89b639ffc4f98f7a42af5e33ea Mon Sep 17 00:00:00 2001 From: Trygve Aspenes Date: Fri, 30 Nov 2018 15:36:06 +0100 Subject: [PATCH 10/24] stickler blank line with space removed --- satpy/readers/nc_goes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/nc_goes.py b/satpy/readers/nc_goes.py index c40f56e8e8..dbbc4e5d3a 100644 --- a/satpy/readers/nc_goes.py +++ b/satpy/readers/nc_goes.py @@ -1167,7 +1167,7 @@ def get_dataset(self, key, info): # not needed this will be popped here. if 'file_type' in info: info.pop('file_type') - + # Update metadata data.attrs.update(info) data.attrs.update( From 2ada13056269b4d1c4f348f809b3a0d82cfc0094 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Mon, 3 Dec 2018 17:23:22 +0000 Subject: [PATCH 11/24] Factorize common parts --- satpy/readers/nc_goes.py | 606 ++++++++++++++------------------------- 1 file changed, 208 insertions(+), 398 deletions(-) diff --git a/satpy/readers/nc_goes.py b/satpy/readers/nc_goes.py index dbbc4e5d3a..f18717f6f3 100644 --- a/satpy/readers/nc_goes.py +++ b/satpy/readers/nc_goes.py @@ -124,6 +124,7 @@ [SCHED-E] http://www.ospo.noaa.gov/Operations/GOES/east/imager-routine.html """ +from abc import abstractmethod from collections import namedtuple from datetime import datetime, timedelta import logging @@ -502,12 +503,12 @@ } # Source: [SCHED-W], [SCHED-E] -class GOESNCFileHandler(BaseFileHandler): +class GOESNCBaseFileHandler(BaseFileHandler): """File handler for GOES Imager data in netCDF format""" - def __init__(self, filename, filename_info, filetype_info): + def __init__(self, filename, filename_info, filetype_info, geo_data=None): """Initialize the reader.""" - super(GOESNCFileHandler, self).__init__(filename, filename_info, - filetype_info) + super(GOESNCBaseFileHandler, self).__init__(filename, filename_info, + filetype_info) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, @@ -523,6 +524,17 @@ def __init__(self, filename, filename_info, filetype_info): nlines=self.nlines, ncols=self.ncols) self._meta = None + self.geo_data = geo_data if geo_data is not None else self.nc + + @abstractmethod + def get_dataset(self, key, info): + """Load dataset designated by the given key from file""" + raise NotImplementedError + + @abstractmethod + def calibrate(self, data, calibration, channel): + """Perform calibration""" + raise NotImplementedError @staticmethod def _get_platform_name(ncattr): @@ -692,7 +704,7 @@ def meta(self): """Derive metadata from the coordinates""" # Use buffered data if available if self._meta is None: - lat = self.nc['lat'] + lat = self.geo_data['lat'] earth_mask = self._get_earth_mask(lat) crow, ccol = self._get_nadir_pixel(earth_mask=earth_mask, sector=self.sector) @@ -700,7 +712,7 @@ def meta(self): yaw_flip = self._is_yaw_flip(lat) del lat - lon = self.nc['lon'] + lon = self.geo_data['lon'] lon0 = 
lon.values[crow, ccol] if crow is not None else None area_def_uni = self._get_area_def_uniform_sampling( lon0=lon0, channel=self.gvar_channel) @@ -715,72 +727,6 @@ def meta(self): 'area_def_uni': area_def_uni} return self._meta - def get_dataset(self, key, info): - """Load dataset designated by the given key from file""" - logger.debug('Reading dataset {}'.format(key.name)) - - # Read data from file and calibrate if necessary - if 'longitude' in key.name: - data = self.nc['lon'] - elif 'latitude' in key.name: - data = self.nc['lat'] - else: - tic = datetime.now() - data = self.calibrate(self.nc['data'].isel(time=0), - calibration=key.calibration, - channel=key.name) - logger.debug('Calibration time: {}'.format(datetime.now() - tic)) - - # Mask space pixels - data = data.where(self.meta['earth_mask']) - - # Set proper dimension names - data = data.rename({'xc': 'x', 'yc': 'y'}) - - # Update metadata - data.attrs.update(info) - data.attrs.update( - {'platform_name': self.platform_name, - 'sensor': self.sensor, - 'sector': self.sector, - 'yaw_flip': self.meta['yaw_flip']} - ) - if self.meta['lon0'] is not None: - # Attributes only available for full disc images. YAML reader - # doesn't like it if satellite_* is present but None - data.attrs.update( - {'satellite_longitude': self.meta['lon0'], - 'satellite_latitude': self.meta['lat0'], - 'satellite_altitude': ALTITUDE, - 'nadir_row': self.meta['nadir_row'], - 'nadir_col': self.meta['nadir_col'], - 'area_def_uniform_sampling': self.meta['area_def_uni']} - ) - - return data - - def calibrate(self, counts, calibration, channel): - """Perform calibration""" - # Convert 16bit counts from netCDF4 file to the original 10bit - # GVAR counts by dividing by 32. See [FAQ]. - counts = counts / 32. - - coefs = CALIB_COEFS[self.platform_name][channel] - if calibration == 'counts': - return counts - elif calibration in ['radiance', 'reflectance', - 'brightness_temperature']: - radiance = self._counts2radiance(counts=counts, coefs=coefs, - channel=channel) - if calibration == 'radiance': - return radiance - - return self._calibrate(radiance=radiance, coefs=coefs, - channel=channel, calibration=calibration) - else: - raise ValueError('Unsupported calibration for channel {}: {}' - .format(channel, calibration)) - def _counts2radiance(self, counts, coefs, channel): """Convert raw detector counts to radiance""" logger.debug('Converting counts to radiance') @@ -913,6 +859,196 @@ def __del__(self): pass +class GOESNCFileHandler(GOESNCBaseFileHandler): + """File handler for GOES Imager data in netCDF format""" + def __init__(self, filename, filename_info, filetype_info): + """Initialize the reader.""" + super(GOESNCFileHandler, self).__init__(filename, filename_info, + filetype_info) + + def get_dataset(self, key, info): + """Load dataset designated by the given key from file""" + logger.debug('Reading dataset {}'.format(key.name)) + + # Read data from file and calibrate if necessary + if 'longitude' in key.name: + data = self.geo_data['lon'] + elif 'latitude' in key.name: + data = self.geo_data['lat'] + else: + tic = datetime.now() + data = self.calibrate(self.nc['data'].isel(time=0), + calibration=key.calibration, + channel=key.name) + logger.debug('Calibration time: {}'.format(datetime.now() - tic)) + + # Mask space pixels + data = data.where(self.meta['earth_mask']) + + # Set proper dimension names + data = data.rename({'xc': 'x', 'yc': 'y'}) + + # Update metadata + data.attrs.update(info) + data.attrs.update( + {'platform_name': self.platform_name, + 'sensor': 
self.sensor, + 'sector': self.sector, + 'yaw_flip': self.meta['yaw_flip']} + ) + if self.meta['lon0'] is not None: + # Attributes only available for full disc images. YAML reader + # doesn't like it if satellite_* is present but None + data.attrs.update( + {'satellite_longitude': self.meta['lon0'], + 'satellite_latitude': self.meta['lat0'], + 'satellite_altitude': ALTITUDE, + 'nadir_row': self.meta['nadir_row'], + 'nadir_col': self.meta['nadir_col'], + 'area_def_uniform_sampling': self.meta['area_def_uni']} + ) + + return data + + def calibrate(self, counts, calibration, channel): + """Perform calibration""" + # Convert 16bit counts from netCDF4 file to the original 10bit + # GVAR counts by dividing by 32. See [FAQ]. + counts = counts / 32. + + coefs = CALIB_COEFS[self.platform_name][channel] + if calibration == 'counts': + return counts + elif calibration in ['radiance', 'reflectance', + 'brightness_temperature']: + radiance = self._counts2radiance(counts=counts, coefs=coefs, + channel=channel) + if calibration == 'radiance': + return radiance + + return self._calibrate(radiance=radiance, coefs=coefs, + channel=channel, calibration=calibration) + else: + raise ValueError('Unsupported calibration for channel {}: {}' + .format(channel, calibration)) + + +class GOESEUMNCFileHandler(GOESNCBaseFileHandler): + """File handler for GOES Imager data in EUM netCDF format + + TODO: Remove datasets which are not available in the file (counts, + VIS radiance) via available_datasets() -> See #434 + + """ + def __init__(self, filename, filename_info, filetype_info, geo_data): + """Initialize the reader.""" + super(GOESEUMNCFileHandler, self).__init__(filename, filename_info, + filetype_info, geo_data) + + def get_dataset(self, key, info): + """Load dataset designated by the given key from file""" + logger.debug('Reading dataset {}'.format(key.name)) + + tic = datetime.now() + data = self.calibrate(self.nc['data'].isel(time=0), + calibration=key.calibration, + channel=key.name) + logger.debug('Calibration time: {}'.format(datetime.now() - tic)) + + # Mask space pixels + data = data.where(self.meta['earth_mask']) + + # Set proper dimension names + data = data.rename({'xc': 'x', 'yc': 'y'}) + data = data.drop('time') + + # If the file_type attribute is a list and the data is xarray + # the concat of the dataset will not work. As the file_type is + # not needed this will be popped here. + if 'file_type' in info: + info.pop('file_type') + + # Update metadata + data.attrs.update(info) + data.attrs.update( + {'platform_name': self.platform_name, + 'sensor': self.sensor, + 'sector': self.sector, + 'yaw_flip': self.meta['yaw_flip']} + ) + if self.meta['lon0'] is not None: + # Attributes only available for full disc images. 
YAML reader + # doesn't like it if satellite_* is present but None + data.attrs.update( + {'satellite_longitude': self.meta['lon0'], + 'satellite_latitude': self.meta['lat0'], + 'satellite_altitude': ALTITUDE, + 'nadir_row': self.meta['nadir_row'], + 'nadir_col': self.meta['nadir_col'], + 'area_def_uniform_sampling': self.meta['area_def_uni']} + ) + + return data + + def calibrate(self, data, calibration, channel): + """Perform calibration""" + coefs = CALIB_COEFS[self.platform_name][channel] + is_vis = self._is_vis(channel) + + # IR files provide radiances, VIS file provides reflectances + if is_vis and calibration == 'reflectance': + return data + elif not is_vis and calibration == 'radiance': + return data + elif not is_vis and calibration == 'brightness_temperature': + return self._calibrate(radiance=data, calibration=calibration, + coefs=coefs, channel=channel) + else: + raise ValueError('Unsupported calibration for channel {}: {}' + .format(channel, calibration)) + + +class GOESEUMGEONCFileHandler(BaseFileHandler): + """File handler for GOES Geolocation data in EUM netCDF format""" + def __init__(self, filename, filename_info, filetype_info): + """Initialize the reader.""" + super(GOESEUMGEONCFileHandler, self).__init__(filename, filename_info, + filetype_info) + self.nc = xr.open_dataset(self.filename, + decode_cf=True, + mask_and_scale=False, + chunks={'xc': CHUNK_SIZE, 'yc': CHUNK_SIZE}) + self.sensor = 'goes_imager' + self.nlines = self.nc.dims['yc'] + self.ncols = self.nc.dims['xc'] + self.platform_name = GOESNCBaseFileHandler._get_platform_name( + self.nc.attrs['Satellite Sensor']) + self.platform_shortname = self.platform_name.replace('-', '').lower() + self._meta = None + + def __getitem__(self, item): + return getattr(self.nc, item) + + def get_dataset(self, key, info): + """Load dataset designated by the given key from file""" + logger.debug('Reading dataset {}'.format(key.name)) + + # Read data from file and calibrate if necessary + if 'longitude' in key.name: + data = self.nc['lon'] + elif 'latitude' in key.name: + data = self.nc['lat'] + else: + raise KeyError("Unknown dataset: {}".format(key.name)) + + # Set proper dimension names + data = data.rename({'xc': 'x', 'yc': 'y'}) + + # Update metadata + data.attrs.update(info) + return data + + class GOESCoefficientReader(object): """Read GOES Imager calibration coefficients from NOAA reference HTMLs""" @@ -1082,332 +1218,6 @@ def _float(self, string): return fac * float(string) -class GOESEUMNCFileHandler(BaseFileHandler): - """File handler for GOES Imager data in EUM netCDF format""" - def __init__(self, filename, filename_info, filetype_info, geo_data): - """Initialize the reader.""" - super(GOESEUMNCFileHandler, self).__init__(filename, filename_info, - filetype_info) - self.nc = xr.open_dataset(self.filename, - decode_cf=True, - mask_and_scale=False, - chunks={'xc': CHUNK_SIZE, 'yc': CHUNK_SIZE}) - self.sensor = 'goes_imager' - self.nlines = self.nc.dims['yc'] - self.ncols = self.nc.dims['xc'] - self.platform_name = self._get_platform_name( - self.nc.attrs['Satellite Sensor']) - self.platform_shortname = self.platform_name.replace('-', '').lower() - self.gvar_channel = int(self.nc['bands'].values) - self.sector = self._get_sector(channel=self.gvar_channel, - nlines=self.nlines, - ncols=self.ncols) - self._meta = None - - @staticmethod - def _get_platform_name(ncattr): - """Determine name of the platform""" - match = re.match(r'G-(\d+)', ncattr) - if match: - return SPACECRAFTS.get(int(match.groups()[0])) - - return None 
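# --- Editorial sketch (illustration only, not part of the patch above): the
# _get_platform_name() helper shown in the removed hunk maps the netCDF
# 'Satellite Sensor' attribute, e.g. 'G-15', to a platform name through the
# module-level SPACECRAFTS mapping; the subset below is assumed for the example.
import re

SPACECRAFTS_EXAMPLE = {15: 'GOES-15'}  # assumed subset of SPACECRAFTS


def get_platform_name_sketch(ncattr):
    """Return e.g. 'GOES-15' for ncattr == 'G-15', or None if no match."""
    match = re.match(r'G-(\d+)', ncattr)
    if match:
        return SPACECRAFTS_EXAMPLE.get(int(match.groups()[0]))
    return None

# get_platform_name_sketch('G-15') -> 'GOES-15'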
- - def _get_sector(self, channel, nlines, ncols): - """Determine which sector was scanned""" - if self._is_vis(channel): - margin = 100 - sectors_ref = VIS_SECTORS - else: - margin = 50 - sectors_ref = IR_SECTORS - - for (nlines_ref, ncols_ref), sector in sectors_ref.items(): - if np.fabs(ncols - ncols_ref) < margin and \ - np.fabs(nlines - nlines_ref) < margin: - return sector - - return UNKNOWN_SECTOR - - @staticmethod - def _is_vis(channel): - """Determine whether the given channel is a visible channel""" - if isinstance(channel, str): - return channel == '00_7' - elif isinstance(channel, int): - return channel == 1 - else: - raise ValueError('Invalid channel') - - @property - def meta(self): - """Derive metadata from the coordinates""" - # Use buffered data if available - return self._meta - - def get_dataset(self, key, info): - """Load dataset designated by the given key from file""" - logger.debug('Reading dataset {}'.format(key.name)) - - tic = datetime.now() - data = self.calibrate(self.nc['data'].isel(time=0), - calibration=key.calibration, - channel=key.name) - logger.debug('Calibration time: {}'.format(datetime.now() - tic)) - - # Mask space pixels - # data = data.where(self.meta['earth_mask']) - - # Set proper dimension names - data = data.rename({'xc': 'x', 'yc': 'y'}) - data = data.drop('time') - - # If the file_type attribute is a list and the data is xarray - # the concat of the dataset will not work. As the file_type is - # not needed this will be popped here. - if 'file_type' in info: - info.pop('file_type') - - # Update metadata - data.attrs.update(info) - data.attrs.update( - {'platform_name': self.platform_name, - 'sensor': self.sensor} - ) - return data - - def calibrate(self, counts, calibration, channel): - """Perform calibration""" - # Convert 16bit counts from netCDF4 file to the original 10bit - # GVAR counts by dividing by 32. See [FAQ]. - # FIXME quick hack as EUM data already are Albedo or radiance - # counts = counts / 32. - - coefs = CALIB_COEFS[self.platform_name][channel] - if calibration == 'counts': - return counts - elif calibration in ['radiance', 'reflectance', - 'brightness_temperature']: - # FIXME quick hack as the EUM data is already in Albedo - # radiance = self._counts2radiance(counts=counts, coefs=coefs, - # channel=channel) - radiance = counts - if calibration == 'radiance': - return radiance - - return self._calibrate(radiance=radiance, coefs=coefs, - channel=channel, calibration=calibration) - else: - raise ValueError('Unsupported calibration for channel {}: {}' - .format(channel, calibration)) - - def _counts2radiance(self, counts, coefs, channel): - """Convert raw detector counts to radiance""" - logger.debug('Converting counts to radiance') - - if self._is_vis(channel): - # Since the scanline-detector assignment is unknown, use the average - # coefficients for all scanlines. 
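# --- Editorial sketch (hypothetical coefficient values): the VIS branch in the
# removed lines below averages the per-detector calibration coefficients,
# because the scanline-to-detector assignment is unknown. For example:
import numpy as np

coefs_example = {'slope': [0.610, 0.614, 0.618], 'offset': [-16.7, -16.8, -16.9]}
slope_example = np.array(coefs_example['slope']).mean()    # ~ 0.614
offset_example = np.array(coefs_example['offset']).mean()  # ~ -16.8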
- slope = np.array(coefs['slope']).mean() - offset = np.array(coefs['offset']).mean() - return self._viscounts2radiance(counts=counts, slope=slope, - offset=offset) - - return self._ircounts2radiance(counts=counts, scale=coefs['scale'], - offset=coefs['offset']) - - def _calibrate(self, radiance, coefs, channel, calibration): - """Convert radiance to reflectance or brightness temperature""" - if self._is_vis(channel): - if not calibration == 'reflectance': - raise ValueError('Cannot calibrate VIS channel to ' - '{}'.format(calibration)) - return self._calibrate_vis(radiance=radiance, k=coefs['k']) - else: - if not calibration == 'brightness_temperature': - raise ValueError('Cannot calibrate IR channel to ' - '{}'.format(calibration)) - - # Since the scanline-detector assignment is unknown, use the average - # coefficients for all scanlines. - mean_coefs = {'a': np.array(coefs['a']).mean(), - 'b': np.array(coefs['b']).mean(), - 'n': np.array(coefs['n']).mean(), - 'btmin': coefs['btmin'], - 'btmax': coefs['btmax']} - return self._calibrate_ir(radiance=radiance, coefs=mean_coefs) - - @staticmethod - def _ircounts2radiance(counts, scale, offset): - """Convert IR counts to radiance - - Reference: [IR]. - - Args: - counts: Raw detector counts - scale: Scale [mW-1 m2 cm sr] - offset: Offset [1] - - Returns: - Radiance [mW m-2 cm-1 sr-1] - """ - rad = (counts - offset) / scale - return rad.clip(min=0) - - @staticmethod - def _calibrate_ir(radiance, coefs): - """Convert IR radiance to brightness temperature - - Reference: [IR] - - Args: - radiance: Radiance [mW m-2 cm-1 sr-1] - coefs: Dictionary of calibration coefficients. Keys: - n: The channel's central wavenumber [cm-1] - a: Offset [K] - b: Slope [1] - btmin: Minimum brightness temperature threshold [K] - btmax: Maximum brightness temperature threshold [K] - - Returns: - Brightness temperature [K] - """ - logger.debug('Calibrating to brightness temperature') - - # Compute brightness temperature using inverse Planck formula - n = coefs['n'] - bteff = C2 * n / xu.log(1 + C1 * n**3 / radiance.where(radiance > 0)) - bt = (bteff * coefs['b'] + coefs['a']) - # Apply BT threshold - return bt.where(xu.logical_and(bt >= coefs['btmin'], - bt <= coefs['btmax'])) - - @staticmethod - def _viscounts2radiance(counts, slope, offset): - """Convert VIS counts to radiance - - References: [VIS] - - Args: - counts: Raw detector counts - slope: Slope [W m-2 um-1 sr-1] - offset: Offset [W m-2 um-1 sr-1] - Returns: - Radiance [W m-2 um-1 sr-1] - """ - # rad = counts * slope + offset - return counts.clip(min=0) - - @staticmethod - def _calibrate_vis(radiance, k): - """Convert VIS radiance to reflectance - - Note: Angle of incident radiation and annual variation of the - earth-sun distance is not taken into account. A value of 100% - corresponds to the radiance of a perfectly reflecting diffuse surface - illuminated at normal incidence when the sun is at its annual-average - distance from the Earth. - - TODO: Take angle of incident radiation (cos sza) and annual variation - of the earth-sun distance into account. - - Reference: [VIS] - - Args: - radiance: Radiance [mW m-2 cm-1 sr-1] - k: pi / H, where H is the solar spectral irradiance at - annual-average sun-earth distance, averaged over the spectral - response function of the detector). 
Units of k: [m2 um sr W-1] - Returns: - Reflectance [%] - """ - logger.debug('Calibrating to reflectance') - # FIXME: quick hack commented out here as VIS is aleady calivrated to - # Albdo - # refl = 100 * k * radiance - # refl = k - return radiance.clip(min=0) - - def __del__(self): - try: - self.nc.close() - except (AttributeError, IOError, OSError): - pass - - -class GOESEUMGEONCFileHandler(BaseFileHandler): - """File handler for GOES Geolocation data in EUM netCDF format""" - def __init__(self, filename, filename_info, filetype_info): - """Initialize the reader.""" - super(GOESEUMGEONCFileHandler, self).__init__(filename, filename_info, - filetype_info) - self.nc = xr.open_dataset(self.filename, - decode_cf=True, - mask_and_scale=False, - chunks={'xc': CHUNK_SIZE, 'yc': CHUNK_SIZE}) - self.sensor = 'goes_imager' - self.nlines = self.nc.dims['yc'] - self.ncols = self.nc.dims['xc'] - self.platform_name = self._get_platform_name( - self.nc.attrs['Satellite Sensor']) - self.platform_shortname = self.platform_name.replace('-', '').lower() - self._meta = None - - def get_dataset(self, key, info): - """Load dataset designated by the given key from file""" - logger.debug('Reading dataset {}'.format(key.name)) - - # Read data from file and calibrate if necessary - if 'longitude' in key.name: - data = self.nc['lon'] - elif 'latitude' in key.name: - data = self.nc['lat'] - else: - logger.debug("Unknown key.name: ", key.name) - - # Set proper dimension names - data = data.rename({'xc': 'x', 'yc': 'y'}) - - # Update metadata - data.attrs.update(info) - return data - - @staticmethod - def _get_platform_name(ncattr): - """Determine name of the platform""" - match = re.match(r'G-(\d+)', ncattr) - if match: - return SPACECRAFTS.get(int(match.groups()[0])) - - return None - - def _get_sector(self, channel, nlines, ncols): - """Determine which sector was scanned""" - if self._is_vis(channel): - margin = 100 - sectors_ref = VIS_SECTORS - else: - margin = 50 - sectors_ref = IR_SECTORS - - for (nlines_ref, ncols_ref), sector in sectors_ref.items(): - if np.fabs(ncols - ncols_ref) < margin and \ - np.fabs(nlines - nlines_ref) < margin: - return sector - - return UNKNOWN_SECTOR - - @staticmethod - def _is_vis(channel): - """Determine whether the given channel is a visible channel""" - if isinstance(channel, str): - return channel == '00_7' - elif isinstance(channel, int): - return channel == 1 - else: - raise ValueError('Invalid channel') - - def test_coefs(ir_url, vis_url): """Test calibration coefficients against NOAA reference pages From 27ebff59986bbf992da7dfef5d56c7fe966b98ea Mon Sep 17 00:00:00 2001 From: Trygve Aspenes Date: Tue, 4 Dec 2018 09:28:10 +0100 Subject: [PATCH 12/24] Added some comments. Added copyright header --- satpy/readers/nc_goes.py | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/satpy/readers/nc_goes.py b/satpy/readers/nc_goes.py index f18717f6f3..17f649a0e8 100644 --- a/satpy/readers/nc_goes.py +++ b/satpy/readers/nc_goes.py @@ -1,4 +1,28 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2018. + +# Author(s): + +# Stephan Finkensieper +# Trygve Aspenes + +# This file is part of satpy. + +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. 
+ +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . + """Reader for GOES 8-15 imager data in netCDF format from NOAA CLASS + Also handles GOES 15 data in netCDF format reformated by Eumetsat GOES Imager netCDF files contain geolocated detector counts. If ordering via NOAA CLASS, select 16 bits/pixel. The instrument oversamples the viewed scene @@ -122,6 +146,16 @@ [FAQ] https://www.ncdc.noaa.gov/sites/default/files/attachments/Satellite-Frequently-Asked-Questions_2.pdf [SCHED-W] http://www.ospo.noaa.gov/Operations/GOES/west/imager-routine.html [SCHED-E] http://www.ospo.noaa.gov/Operations/GOES/east/imager-routine.html + +Eumetsat formated netCDF data: + +The main differences are: +1: The geolocation is in a separate file, used for all bands +2: VIS data is calibrated to Albedo( or reflectance) +3: IR data is calibrated to radiance. +4: File name differs also slightly +5: Data is received via EumetCast + """ from abc import abstractmethod From a5cc1d4ad568ae98057af8c530597ee41055d530 Mon Sep 17 00:00:00 2001 From: Trygve Aspenes Date: Tue, 4 Dec 2018 13:31:40 +0100 Subject: [PATCH 13/24] stickler update --- satpy/readers/nc_goes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/nc_goes.py b/satpy/readers/nc_goes.py index 17f649a0e8..9db7297e23 100644 --- a/satpy/readers/nc_goes.py +++ b/satpy/readers/nc_goes.py @@ -559,7 +559,7 @@ def __init__(self, filename, filename_info, filetype_info, geo_data=None): ncols=self.ncols) self._meta = None self.geo_data = geo_data if geo_data is not None else self.nc - + @abstractmethod def get_dataset(self, key, info): """Load dataset designated by the given key from file""" From c9b1229f36f84336f3f9b5285543568a5de34ffd Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Tue, 4 Dec 2018 13:00:10 +0000 Subject: [PATCH 14/24] Make copyright header compliant with #389 --- satpy/readers/nc_goes.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/satpy/readers/nc_goes.py b/satpy/readers/nc_goes.py index 9db7297e23..30642fa032 100644 --- a/satpy/readers/nc_goes.py +++ b/satpy/readers/nc_goes.py @@ -1,11 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2018. - -# Author(s): - -# Stephan Finkensieper -# Trygve Aspenes +# Copyright (c) 2018 PyTroll developers # This file is part of satpy. From f1dba6b3a51c80edc3eeda7b6c3991752d95c631 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Tue, 4 Dec 2018 13:08:19 +0000 Subject: [PATCH 15/24] Add resolution property to geo file handler --- satpy/readers/nc_goes.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/satpy/readers/nc_goes.py b/satpy/readers/nc_goes.py index 30642fa032..7b217fef0f 100644 --- a/satpy/readers/nc_goes.py +++ b/satpy/readers/nc_goes.py @@ -146,10 +146,11 @@ The main differences are: 1: The geolocation is in a separate file, used for all bands -2: VIS data is calibrated to Albedo( or reflectance) +2: VIS data is calibrated to Albedo (or reflectance) 3: IR data is calibrated to radiance. 
-4: File name differs also slightly -5: Data is received via EumetCast +4: VIS data is downsampled to IR resolution (4km) +5: File name differs also slightly +6: Data is received via EumetCast """ @@ -1077,6 +1078,14 @@ def get_dataset(self, key, info): data.attrs.update(info) return data + @property + def resolution(self): + """Specify the spatial resolution of the dataset. + + In the EUMETSAT format VIS data is downsampled to IR resolution (4km). + """ + return 4000.0 + class GOESCoefficientReader(object): """Read GOES Imager calibration coefficients from NOAA reference HTMLs""" From 35b998debb82a205114fee438a9e9e51350ae2ab Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Tue, 4 Dec 2018 13:44:18 +0000 Subject: [PATCH 16/24] Factorize metadata update This also makes sure the file_type is popped from the data array's attributes, not from the ds_info --- satpy/readers/nc_goes.py | 75 +++++++++++++++++----------------------- 1 file changed, 32 insertions(+), 43 deletions(-) diff --git a/satpy/readers/nc_goes.py b/satpy/readers/nc_goes.py index 7b217fef0f..4a376de1a4 100644 --- a/satpy/readers/nc_goes.py +++ b/satpy/readers/nc_goes.py @@ -882,6 +882,36 @@ def _calibrate_vis(radiance, k): refl = 100 * k * radiance return refl.clip(min=0) + def _update_metadata(self, data, ds_info): + """Update metadata of the given DataArray""" + # Metadata from the dataset definition + data.attrs.update(ds_info) + + # If the file_type attribute is a list and the data is xarray + # the concat of the dataset will not work. As the file_type is + # not needed this will be popped here. + if 'file_type' in data.attrs: + data.attrs.pop('file_type') + + # Metadata discovered from the file + data.attrs.update( + {'platform_name': self.platform_name, + 'sensor': self.sensor, + 'sector': self.sector, + 'yaw_flip': self.meta['yaw_flip']} + ) + if self.meta['lon0'] is not None: + # Attributes only available for full disc images. YAML reader + # doesn't like it if satellite_* is present but None + data.attrs.update( + {'satellite_longitude': self.meta['lon0'], + 'satellite_latitude': self.meta['lat0'], + 'satellite_altitude': ALTITUDE, + 'nadir_row': self.meta['nadir_row'], + 'nadir_col': self.meta['nadir_col'], + 'area_def_uniform_sampling': self.meta['area_def_uni']} + ) + def __del__(self): try: self.nc.close() @@ -919,24 +949,7 @@ def get_dataset(self, key, info): data = data.rename({'xc': 'x', 'yc': 'y'}) # Update metadata - data.attrs.update(info) - data.attrs.update( - {'platform_name': self.platform_name, - 'sensor': self.sensor, - 'sector': self.sector, - 'yaw_flip': self.meta['yaw_flip']} - ) - if self.meta['lon0'] is not None: - # Attributes only available for full disc images. 
YAML reader - # doesn't like it if satellite_* is present but None - data.attrs.update( - {'satellite_longitude': self.meta['lon0'], - 'satellite_latitude': self.meta['lat0'], - 'satellite_altitude': ALTITUDE, - 'nadir_row': self.meta['nadir_row'], - 'nadir_col': self.meta['nadir_col'], - 'area_def_uniform_sampling': self.meta['area_def_uni']} - ) + self._update_metadata(data, ds_info=info) return data @@ -968,7 +981,6 @@ class GOESEUMNCFileHandler(GOESNCBaseFileHandler): TODO: Remove datasets which are not available in the file (counts, VIS radiance) via available_datasets() -> See #434 - """ def __init__(self, filename, filename_info, filetype_info, geo_data): """Initialize the reader.""" @@ -992,31 +1004,8 @@ def get_dataset(self, key, info): data = data.rename({'xc': 'x', 'yc': 'y'}) data = data.drop('time') - # If the file_type attribute is a list and the data is xarray - # the concat of the dataset will not work. As the file_type is - # not needed this will be popped here. - if 'file_type' in info: - info.pop('file_type') - # Update metadata - data.attrs.update(info) - data.attrs.update( - {'platform_name': self.platform_name, - 'sensor': self.sensor, - 'sector': self.sector, - 'yaw_flip': self.meta['yaw_flip']} - ) - if self.meta['lon0'] is not None: - # Attributes only available for full disc images. YAML reader - # doesn't like it if satellite_* is present but None - data.attrs.update( - {'satellite_longitude': self.meta['lon0'], - 'satellite_latitude': self.meta['lat0'], - 'satellite_altitude': ALTITUDE, - 'nadir_row': self.meta['nadir_row'], - 'nadir_col': self.meta['nadir_col'], - 'area_def_uniform_sampling': self.meta['area_def_uni']} - ) + self._update_metadata(data, ds_info=info) return data From d5777bc0750a1918f3c6e72d24d731a68a79d4f4 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Tue, 4 Dec 2018 14:09:19 +0000 Subject: [PATCH 17/24] Make update_ds_ids_from_file_handlers work with multiple file types --- satpy/readers/yaml_reader.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/satpy/readers/yaml_reader.py b/satpy/readers/yaml_reader.py index b0a22389b9..f98cc60e83 100644 --- a/satpy/readers/yaml_reader.py +++ b/satpy/readers/yaml_reader.py @@ -519,7 +519,10 @@ def update_ds_ids_from_file_handlers(self): continue for ds_id, ds_info in list(self.ids.items()): - if fh.filetype_info['file_type'] != ds_info['file_type']: + file_types = ds_info['file_type'] + if not isinstance(file_types, list): + file_types = [file_types] + if fh.filetype_info['file_type'] not in file_types: continue if ds_id.resolution is not None: continue From 13136c5c60274898717fbb3df433c5a2dca996de Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 5 Dec 2018 09:51:43 +0000 Subject: [PATCH 18/24] Separate unit tests --- satpy/tests/reader_tests/test_nc_goes.py | 300 +++++++++++++---------- 1 file changed, 175 insertions(+), 125 deletions(-) diff --git a/satpy/tests/reader_tests/test_nc_goes.py b/satpy/tests/reader_tests/test_nc_goes.py index e912809ffc..79b08f3f1a 100644 --- a/satpy/tests/reader_tests/test_nc_goes.py +++ b/satpy/tests/reader_tests/test_nc_goes.py @@ -20,113 +20,47 @@ import mock -class GOESNCFileHandlerTest(unittest.TestCase): +class GOESNCBaseFileHandlerTest(unittest.TestCase): longMessage = True @mock.patch('satpy.readers.nc_goes.xr') + @mock.patch.multiple('satpy.readers.nc_goes.GOESNCBaseFileHandler', + __abstractmethods__=set()) def setUp(self, xr_): - # Disable logging - logging.getLogger("urllib3").setLevel(logging.WARNING) - - from 
satpy.readers.nc_goes import GOESNCFileHandler, CALIB_COEFS + from satpy.readers.nc_goes import CALIB_COEFS, GOESNCBaseFileHandler self.coefs = CALIB_COEFS['GOES-15'] - self.all_coefs = CALIB_COEFS - self.channels = sorted(self.coefs.keys()) - self.ir_channels = sorted([ch for ch in self.channels - if not GOESNCFileHandler._is_vis(ch)]) - self.vis_channels = sorted([ch for ch in self.channels - if GOESNCFileHandler._is_vis(ch)]) - # Mock file access to return a fake dataset. Choose a medium count value - # (100) to avoid elements being masked due to invalid - # radiance/reflectance/BT - nrows = ncols = 300 - self.counts = 100 * 32 * np.ones((1, nrows, ncols)) # emulate 10-bit - self.lon = np.zeros((nrows, ncols)) # Dummy - self.lat = np.repeat(np.linspace(-150, 150, nrows), ncols).reshape( - nrows, ncols) # Includes invalid values to be masked + # Mock file access to return a fake dataset. self.time = datetime.datetime(2018, 8, 16, 16, 7) - - xr_.open_dataset.return_value = xr.Dataset( - {'data': xr.DataArray(data=self.counts, dims=('time', 'yc', 'xc')), - 'lon': xr.DataArray(data=self.lon, dims=('yc', 'xc')), - 'lat': xr.DataArray(data=self.lat, dims=('yc', 'xc')), + self.dummy3d = np.zeros((1, 2, 2)) + self.dummy2d = np.zeros((2, 2)) + self.band = 1 + self.nc = xr.Dataset( + {'data': xr.DataArray(self.dummy3d, dims=('time', 'yc', 'xc')), + 'lon': xr.DataArray(data=self.dummy2d, dims=('yc', 'xc')), + 'lat': xr.DataArray(data=self.dummy2d, dims=('yc', 'xc')), 'time': xr.DataArray(data=np.array([self.time], dtype='datetime64[ms]'), dims=('time',)), - 'bands': xr.DataArray(data=np.array([1]))}, + 'bands': xr.DataArray(data=np.array([self.band]))}, attrs={'Satellite Sensor': 'G-15'}) - - # Instantiate reader using the mocked open_dataset() method - self.reader = GOESNCFileHandler(filename='dummy', filename_info={}, - filetype_info={}) - - def test_get_dataset_coords(self): - """Test whether coordinates returned by get_dataset() are correct""" - lon = self.reader.get_dataset(key=DatasetID(name='longitude', - calibration=None), - info={}) - lat = self.reader.get_dataset(key=DatasetID(name='latitude', - calibration=None), - info={}) - # ... this only compares the valid (unmasked) elements - self.assertTrue(np.all(lat.to_masked_array() == self.lat), - msg='get_dataset() returns invalid latitude') - self.assertTrue(np.all(lon.to_masked_array() == self.lon), - msg='get_dataset() returns invalid longitude') - - def test_get_dataset_counts(self): - """Test whether counts returned by get_dataset() are correct""" - for ch in self.channels: - counts = self.reader.get_dataset( - key=DatasetID(name=ch, calibration='counts'), info={}) - # ... this only compares the valid (unmasked) elements - self.assertTrue(np.all(self.counts/32. == counts.to_masked_array()), - msg='get_dataset() returns invalid counts for ' - 'channel {}'.format(ch)) - - def test_get_dataset_masks(self): - """Test whether data and coordinates are masked consistently""" - # Requires that no element has been masked due to invalid - # radiance/reflectance/BT (see setUp()). 
- lon = self.reader.get_dataset(key=DatasetID(name='longitude', - calibration=None), - info={}) - lon_mask = lon.to_masked_array().mask - for ch in self.channels: - for calib in ('counts', 'radiance', 'reflectance', - 'brightness_temperature'): - try: - data = self.reader.get_dataset( - key=DatasetID(name=ch, calibration=calib), info={}) - except ValueError: - continue - data_mask = data.to_masked_array().mask - self.assertTrue(np.all(data_mask == lon_mask), - msg='get_dataset() returns inconsistently ' - 'masked {} in channel {}'.format(calib, ch)) - - def test_get_dataset_invalid(self): - """Test handling of invalid calibrations""" - # VIS -> BT - args = dict(key=DatasetID(name='00_7', - calibration='brightness_temperature'), - info={}) - self.assertRaises(ValueError, self.reader.get_dataset, **args) - - # IR -> Reflectance - args = dict(key=DatasetID(name='10_7', - calibration='reflectance'), - info={}) - self.assertRaises(ValueError, self.reader.get_dataset, **args) - - # Unsupported calibration - args = dict(key=DatasetID(name='10_7', - calibration='invalid'), - info={}) - self.assertRaises(ValueError, self.reader.get_dataset, **args) + xr_.open_dataset.return_value = self.nc + + # Instantiate reader using the mocked open_dataset() method. Also, make + # the reader believe all abstract methods have been implemented. + self.reader = GOESNCBaseFileHandler(filename='dummy', filename_info={}, + filetype_info={}) + + def test_init(self): + """Tests reader initialization""" + self.assertEqual(self.reader.nlines, self.dummy2d.shape[0]) + self.assertEqual(self.reader.ncols, self.dummy2d.shape[1]) + self.assertEqual(self.reader.platform_name, 'GOES-15') + self.assertEqual(self.reader.platform_shortname, 'goes15') + self.assertEqual(self.reader.gvar_channel, self.band) + self.assertIsInstance(self.reader.geo_data, xr.Dataset) def test_get_nadir_pixel(self): """Test identification of the nadir pixel""" @@ -163,21 +97,6 @@ def test_is_yaw_flip(self): self.assertEqual(self.reader._is_yaw_flip(lat_dsc, delta=1), False, msg='Yaw flip false alarm') - def test_calibrate(self): - """Test whether the correct calibration methods are called""" - for ch in self.channels: - if self.reader._is_vis(ch): - calibs = {'radiance': '_viscounts2radiance', - 'reflectance': '_calibrate_vis'} - else: - calibs = {'radiance': '_ircounts2radiance', - 'brightness_temperature': '_calibrate_ir'} - for calib, method in calibs.items(): - with mock.patch.object(self.reader, method) as target_func: - self.reader.calibrate(counts=self.reader.nc['data'], - calibration=calib, channel=ch) - target_func.assert_called() - def test_viscounts2radiance(self): """Test conversion from VIS counts to radiance""" # Reference data is for detector #1 @@ -269,22 +188,6 @@ def test_calibrate_ir(self): msg='Incorrect conversion from radiance to brightness ' 'temperature in channel {} detector {}'.format(ch, det)) - def test_start_time(self): - """Test dataset start time stamp""" - self.assertEqual(self.reader.start_time, self.time) - - def test_end_time(self): - """Test dataset end time stamp""" - from satpy.readers.nc_goes import (SCAN_DURATION, FULL_DISC, - UNKNOWN_SECTOR) - expected = { - UNKNOWN_SECTOR: self.time, - FULL_DISC: self.time + SCAN_DURATION[FULL_DISC] - } - for sector, end_time in expected.items(): - self.reader.sector = sector - self.assertEqual(self.reader.end_time, end_time) - def test_get_sector(self): """Test sector identification""" from satpy.readers.nc_goes import (FULL_DISC, NORTH_HEMIS_EAST, @@ -318,12 +221,159 @@ def 
test_get_sector(self): self.assertEqual(sector, sector_ref, msg='Incorrect sector identification') + def test_start_time(self): + """Test dataset start time stamp""" + self.assertEqual(self.reader.start_time, self.time) + + def test_end_time(self): + """Test dataset end time stamp""" + from satpy.readers.nc_goes import (SCAN_DURATION, FULL_DISC, + UNKNOWN_SECTOR) + expected = { + UNKNOWN_SECTOR: self.time, + FULL_DISC: self.time + SCAN_DURATION[FULL_DISC] + } + for sector, end_time in expected.items(): + self.reader.sector = sector + self.assertEqual(self.reader.end_time, end_time) + + +class GOESNCFileHandlerTest(unittest.TestCase): + + longMessage = True + + @mock.patch('satpy.readers.nc_goes.xr') + def setUp(self, xr_): + from satpy.readers.nc_goes import GOESNCFileHandler, CALIB_COEFS + + self.coefs = CALIB_COEFS['GOES-15'] + self.all_coefs = CALIB_COEFS + self.channels = sorted(self.coefs.keys()) + self.ir_channels = sorted([ch for ch in self.channels + if not GOESNCFileHandler._is_vis(ch)]) + self.vis_channels = sorted([ch for ch in self.channels + if GOESNCFileHandler._is_vis(ch)]) + + # Mock file access to return a fake dataset. Choose a medium count value + # (100) to avoid elements being masked due to invalid + # radiance/reflectance/BT + nrows = ncols = 300 + self.counts = 100 * 32 * np.ones((1, nrows, ncols)) # emulate 10-bit + self.lon = np.zeros((nrows, ncols)) # Dummy + self.lat = np.repeat(np.linspace(-150, 150, nrows), ncols).reshape( + nrows, ncols) # Includes invalid values to be masked + + xr_.open_dataset.return_value = xr.Dataset( + {'data': xr.DataArray(data=self.counts, dims=('time', 'yc', 'xc')), + 'lon': xr.DataArray(data=self.lon, dims=('yc', 'xc')), + 'lat': xr.DataArray(data=self.lat, dims=('yc', 'xc')), + 'time': xr.DataArray(data=np.array([0], dtype='datetime64[ms]'), + dims=('time',)), + 'bands': xr.DataArray(data=np.array([1]))}, + attrs={'Satellite Sensor': 'G-15'}) + + # Instantiate reader using the mocked open_dataset() method + self.reader = GOESNCFileHandler(filename='dummy', filename_info={}, + filetype_info={}) + + def test_get_dataset_coords(self): + """Test whether coordinates returned by get_dataset() are correct""" + lon = self.reader.get_dataset(key=DatasetID(name='longitude', + calibration=None), + info={}) + lat = self.reader.get_dataset(key=DatasetID(name='latitude', + calibration=None), + info={}) + # ... this only compares the valid (unmasked) elements + self.assertTrue(np.all(lat.to_masked_array() == self.lat), + msg='get_dataset() returns invalid latitude') + self.assertTrue(np.all(lon.to_masked_array() == self.lon), + msg='get_dataset() returns invalid longitude') + + def test_get_dataset_counts(self): + """Test whether counts returned by get_dataset() are correct""" + for ch in self.channels: + counts = self.reader.get_dataset( + key=DatasetID(name=ch, calibration='counts'), info={}) + # ... this only compares the valid (unmasked) elements + self.assertTrue(np.all(self.counts/32. == counts.to_masked_array()), + msg='get_dataset() returns invalid counts for ' + 'channel {}'.format(ch)) + + def test_get_dataset_masks(self): + """Test whether data and coordinates are masked consistently""" + # Requires that no element has been masked due to invalid + # radiance/reflectance/BT (see setUp()). 
+ lon = self.reader.get_dataset(key=DatasetID(name='longitude', + calibration=None), + info={}) + lon_mask = lon.to_masked_array().mask + for ch in self.channels: + for calib in ('counts', 'radiance', 'reflectance', + 'brightness_temperature'): + try: + data = self.reader.get_dataset( + key=DatasetID(name=ch, calibration=calib), info={}) + except ValueError: + continue + data_mask = data.to_masked_array().mask + self.assertTrue(np.all(data_mask == lon_mask), + msg='get_dataset() returns inconsistently ' + 'masked {} in channel {}'.format(calib, ch)) + + def test_get_dataset_invalid(self): + """Test handling of invalid calibrations""" + # VIS -> BT + args = dict(key=DatasetID(name='00_7', + calibration='brightness_temperature'), + info={}) + self.assertRaises(ValueError, self.reader.get_dataset, **args) + + # IR -> Reflectance + args = dict(key=DatasetID(name='10_7', + calibration='reflectance'), + info={}) + self.assertRaises(ValueError, self.reader.get_dataset, **args) + + # Unsupported calibration + args = dict(key=DatasetID(name='10_7', + calibration='invalid'), + info={}) + self.assertRaises(ValueError, self.reader.get_dataset, **args) + + def test_calibrate(self): + """Test whether the correct calibration methods are called""" + for ch in self.channels: + if self.reader._is_vis(ch): + calibs = {'radiance': '_viscounts2radiance', + 'reflectance': '_calibrate_vis'} + else: + calibs = {'radiance': '_ircounts2radiance', + 'brightness_temperature': '_calibrate_ir'} + for calib, method in calibs.items(): + with mock.patch.object(self.reader, method) as target_func: + self.reader.calibrate(counts=self.reader.nc['data'], + calibration=calib, channel=ch) + target_func.assert_called() + + +class GOESNCEUMFileHandlerTest(unittest.TestCase): + def test_get_dataset(self): + # TODO + pass + + def test_calibrate(self): + # TODO + pass + def suite(): """Test suite for GOES netCDF reader""" loader = unittest.TestLoader() mysuite = unittest.TestSuite() + mysuite.addTest(loader.loadTestsFromTestCase(GOESNCBaseFileHandlerTest)) mysuite.addTest(loader.loadTestsFromTestCase(GOESNCFileHandlerTest)) + mysuite.addTest(loader.loadTestsFromTestCase(GOESNCEUMFileHandlerTest)) return mysuite From fe01c77d284f95d24827206cf6bdf88127021da0 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 5 Dec 2018 13:40:31 +0000 Subject: [PATCH 19/24] Add test for update_ds_ids_from_file_handlers --- satpy/tests/test_yaml_reader.py | 74 +++++++++++++++++++++++++++++++++ 1 file changed, 74 insertions(+) diff --git a/satpy/tests/test_yaml_reader.py b/satpy/tests/test_yaml_reader.py index 0cbfedcc30..6387ef4649 100644 --- a/satpy/tests/test_yaml_reader.py +++ b/satpy/tests/test_yaml_reader.py @@ -489,6 +489,78 @@ def test_load_entire_dataset(self, xarray): self.assertIs(proj, xarray.concat.return_value) +class TestFileFileYAMLReaderMultipleFileTypes(unittest.TestCase): + """Test units from FileYAMLReader with multiple file types.""" + + @patch('satpy.readers.yaml_reader.recursive_dict_update') + @patch('satpy.readers.yaml_reader.yaml', spec=yr.yaml) + def setUp(self, _, rec_up): # pylint: disable=arguments-differ + """Setup a reader instance with a fake config.""" + # Example: GOES netCDF data + # a) From NOAA CLASS: ftype1, including coordinates + # b) From EUMETSAT: ftype2, coordinates in extra file (ftype3) + # + # For test completeness add one channel (ch3) which is only available + # in ftype1. 
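# --- Editorial sketch (illustration only): the reader logic exercised by this
# test, update_ds_ids_from_file_handlers in an earlier patch of this series,
# accepts either a single file type or a list; the normalisation boils down to:
def as_file_type_list(file_type):
    """Return a dataset's 'file_type' entry as a list, whether str or list."""
    return file_type if isinstance(file_type, list) else [file_type]

# as_file_type_list('ftype1')             -> ['ftype1']
# as_file_type_list(['ftype1', 'ftype2']) -> ['ftype1', 'ftype2']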
+ patterns1 = ['a.nc'] + patterns2 = ['b.nc'] + patterns3 = ['geo.nc'] + res_dict = {'reader': {'name': 'fake', + 'sensors': ['canon']}, + 'file_types': {'ftype1': {'name': 'ft1', + 'file_patterns': patterns1}, + 'ftype2': {'name': 'ft2', + 'file_patterns': patterns2}, + 'ftype3': {'name': 'ft3', + 'file_patterns': patterns3}}, + 'datasets': {'ch1': {'name': 'ch01', + 'wavelength': [0.5, 0.6, 0.7], + 'calibration': 'reflectance', + 'file_type': ['ftype1', 'ftype2'], + 'coordinates': ['lons', 'lats']}, + 'ch2': {'name': 'ch02', + 'wavelength': [0.7, 0.75, 0.8], + 'calibration': 'counts', + 'file_type': ['ftype1', 'ftype2'], + 'coordinates': ['lons', 'lats']}, + 'ch3': {'name': 'ch03', + 'wavelength': [0.8, 0.85, 0.9], + 'calibration': 'counts', + 'file_type': 'ftype1', + 'coordinates': ['lons', 'lats']}, + 'lons': {'name': 'lons', + 'file_type': ['ftype1', 'ftype3']}, + 'lats': {'name': 'lats', + 'file_type': ['ftype1', 'ftype3']}}} + + rec_up.return_value = res_dict + self.config = res_dict + self.reader = yr.FileYAMLReader([__file__]) + + def test_update_ds_ids_from_file_handlers(self): + """Test updating existing dataset IDs with information from the file""" + orig_ids = self.reader.ids + for ftype, resol in zip(('ftype1', 'ftype2'), (1, 2)): + with patch.dict(self.reader.ids, orig_ids, clear=True): + # Add a file handler with resolution property + self.reader.file_handlers = { + ftype: [MagicMock(filetype_info={'file_type': ftype}, + resolution=resol)]} + + # Update existing dataset IDs with resolution property from + # the file handler + self.reader.update_ds_ids_from_file_handlers() + + # Make sure the resolution property has been transferred + # correctly from the file handler to the dataset ID + for ds_id, ds_info in self.reader.ids.items(): + file_types = ds_info['file_type'] + if not isinstance(file_types, list): + file_types = [file_types] + expected = resol if ftype in file_types else None + self.assertEqual(ds_id.resolution, expected) + + def suite(): """The test suite for test_scene.""" loader = unittest.TestLoader() @@ -497,6 +569,8 @@ def suite(): mysuite.addTest(loader.loadTestsFromTestCase(TestFileFileYAMLReader)) mysuite.addTest(loader.loadTestsFromTestCase( TestFileFileYAMLReaderMultiplePatterns)) + mysuite.addTest(loader.loadTestsFromTestCase( + TestFileFileYAMLReaderMultipleFileTypes)) return mysuite From bde37b71de3b35a1e39b5ea3369b0ba27858f03e Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 5 Dec 2018 16:06:53 +0000 Subject: [PATCH 20/24] Fix sector identification in EUM reader --- satpy/readers/nc_goes.py | 22 ++++++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/satpy/readers/nc_goes.py b/satpy/readers/nc_goes.py index 4a376de1a4..69a9e0afd5 100644 --- a/satpy/readers/nc_goes.py +++ b/satpy/readers/nc_goes.py @@ -566,6 +566,16 @@ def calibrate(self, data, calibration, channel): """Perform calibration""" raise NotImplementedError + @property + @abstractmethod + def vis_sectors(self): + raise NotImplementedError + + @property + @abstractmethod + def ir_sectors(self): + raise NotImplementedError + @staticmethod def _get_platform_name(ncattr): """Determine name of the platform""" @@ -579,10 +589,10 @@ def _get_sector(self, channel, nlines, ncols): """Determine which sector was scanned""" if self._is_vis(channel): margin = 100 - sectors_ref = VIS_SECTORS + sectors_ref = self.vis_sectors else: margin = 50 - sectors_ref = IR_SECTORS + sectors_ref = self.ir_sectors for (nlines_ref, ncols_ref), sector in sectors_ref.items(): if 
np.fabs(ncols - ncols_ref) < margin and \ @@ -921,6 +931,10 @@ def __del__(self): class GOESNCFileHandler(GOESNCBaseFileHandler): """File handler for GOES Imager data in netCDF format""" + + vis_sectors = VIS_SECTORS + ir_sectors = IR_SECTORS + def __init__(self, filename, filename_info, filetype_info): """Initialize the reader.""" super(GOESNCFileHandler, self).__init__(filename, filename_info, @@ -982,6 +996,10 @@ class GOESEUMNCFileHandler(GOESNCBaseFileHandler): TODO: Remove datasets which are not available in the file (counts, VIS radiance) via available_datasets() -> See #434 """ + + vis_sectors = IR_SECTORS # VIS channel is downsampled to IR resolution + ir_sectors = IR_SECTORS + def __init__(self, filename, filename_info, filetype_info, geo_data): """Initialize the reader.""" super(GOESEUMNCFileHandler, self).__init__(filename, filename_info, From 7856dfad5a4c1354a97cbd8b59604c467439923b Mon Sep 17 00:00:00 2001 From: Trygve Aspenes Date: Thu, 6 Dec 2018 14:52:05 +0100 Subject: [PATCH 21/24] Added tests for the EUM NC formated part --- satpy/tests/reader_tests/test_nc_goes.py | 112 +++++++++++++++++++++-- 1 file changed, 105 insertions(+), 7 deletions(-) diff --git a/satpy/tests/reader_tests/test_nc_goes.py b/satpy/tests/reader_tests/test_nc_goes.py index 79b08f3f1a..46d3e7999b 100644 --- a/satpy/tests/reader_tests/test_nc_goes.py +++ b/satpy/tests/reader_tests/test_nc_goes.py @@ -357,15 +357,112 @@ def test_calibrate(self): target_func.assert_called() -class GOESNCEUMFileHandlerTest(unittest.TestCase): - def test_get_dataset(self): - # TODO - pass +class GOESNCEUMFileHandlerRadianceTest(unittest.TestCase): + longMessage = True + + @mock.patch('satpy.readers.nc_goes.xr') + def setUp(self, xr_): + from satpy.readers.nc_goes import GOESEUMNCFileHandler, CALIB_COEFS + + self.coefs = CALIB_COEFS['GOES-15'] + self.all_coefs = CALIB_COEFS + self.channels = sorted(self.coefs.keys()) + self.ir_channels = sorted([ch for ch in self.channels + if not GOESEUMNCFileHandler._is_vis(ch)]) + self.vis_channels = sorted([ch for ch in self.channels + if GOESEUMNCFileHandler._is_vis(ch)]) + + # Mock file access to return a fake dataset. + nrows = ncols = 300 + self.radiance = np.ones((1, nrows, ncols)) # IR channels + self.lon = np.zeros((nrows, ncols)) # Dummy + self.lat = np.repeat(np.linspace(-150, 150, nrows), ncols).reshape( + nrows, ncols) # Includes invalid values to be masked + + xr_.open_dataset.return_value = xr.Dataset( + {'data': xr.DataArray(data=self.radiance, dims=('time', 'yc', 'xc')), + 'time': xr.DataArray(data=np.array([0], dtype='datetime64[ms]'), + dims=('time',)), + 'bands': xr.DataArray(data=np.array([1]))}, + attrs={'Satellite Sensor': 'G-15'}) + + geo_data = xr.Dataset( + {'lon': xr.DataArray(data=self.lon, dims=('yc', 'xc')), + 'lat': xr.DataArray(data=self.lat, dims=('yc', 'xc'))}, + attrs={'Satellite Sensor': 'G-15'}) + + # Instantiate reader using the mocked open_dataset() method + self.reader = GOESEUMNCFileHandler(filename='dummy', filename_info={}, + filetype_info={}, geo_data=geo_data) + + def test_get_dataset_radiance(self): + for ch in self.channels: + if not self.reader._is_vis(ch): + radiance = self.reader.get_dataset( + key=DatasetID(name=ch, calibration='radiance'), info={}) + # ... 
this only compares the valid (unmasked) elements + self.assertTrue(np.all(self.radiance == radiance.to_masked_array()), + msg='get_dataset() returns invalid radiance for ' + 'channel {}'.format(ch)) def test_calibrate(self): - # TODO - pass + """Test whether the correct calibration methods are called""" + for ch in self.channels: + if not self.reader._is_vis(ch): + calibs = {'brightness_temperature': '_calibrate_ir'} + for calib, method in calibs.items(): + with mock.patch.object(self.reader, method) as target_func: + self.reader.calibrate(data=self.reader.nc['data'], + calibration=calib, channel=ch) + target_func.assert_called() + +class GOESNCEUMFileHandlerReflectanceTest(unittest.TestCase): + longMessage = True + + @mock.patch('satpy.readers.nc_goes.xr') + def setUp(self, xr_): + from satpy.readers.nc_goes import GOESEUMNCFileHandler, CALIB_COEFS + self.coefs = CALIB_COEFS['GOES-15'] + self.all_coefs = CALIB_COEFS + self.channels = sorted(self.coefs.keys()) + self.ir_channels = sorted([ch for ch in self.channels + if not GOESEUMNCFileHandler._is_vis(ch)]) + self.vis_channels = sorted([ch for ch in self.channels + if GOESEUMNCFileHandler._is_vis(ch)]) + + # Mock file access to return a fake dataset. + nrows = ncols = 300 + self.reflectance = 50 * np.ones((1, nrows, ncols)) # Vis channel + self.lon = np.zeros((nrows, ncols)) # Dummy + self.lat = np.repeat(np.linspace(-150, 150, nrows), ncols).reshape( + nrows, ncols) # Includes invalid values to be masked + + xr_.open_dataset.return_value = xr.Dataset( + {'data': xr.DataArray(data=self.reflectance, dims=('time', 'yc', 'xc')), + 'time': xr.DataArray(data=np.array([0], dtype='datetime64[ms]'), + dims=('time',)), + 'bands': xr.DataArray(data=np.array([1]))}, + attrs={'Satellite Sensor': 'G-15'}) + + geo_data = xr.Dataset( + {'lon': xr.DataArray(data=self.lon, dims=('yc', 'xc')), + 'lat': xr.DataArray(data=self.lat, dims=('yc', 'xc'))}, + attrs={'Satellite Sensor': 'G-15'}) + + # Instantiate reader using the mocked open_dataset() method + self.reader = GOESEUMNCFileHandler(filename='dummy', filename_info={}, + filetype_info={}, geo_data=geo_data) + + def test_get_dataset_reflectance(self): + for ch in self.channels: + if self.reader._is_vis(ch): + refl = self.reader.get_dataset( + key=DatasetID(name=ch, calibration='reflectance'), info={}) + # ... 
this only compares the valid (unmasked) elements + self.assertTrue(np.all(self.reflectance == refl.to_masked_array()), + msg='get_dataset() returns invalid reflectance for ' + 'channel {}'.format(ch)) def suite(): """Test suite for GOES netCDF reader""" @@ -373,7 +470,8 @@ def suite(): mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(GOESNCBaseFileHandlerTest)) mysuite.addTest(loader.loadTestsFromTestCase(GOESNCFileHandlerTest)) - mysuite.addTest(loader.loadTestsFromTestCase(GOESNCEUMFileHandlerTest)) + mysuite.addTest(loader.loadTestsFromTestCase(GOESNCEUMFileHandlerRadianceTest)) + mysuite.addTest(loader.loadTestsFromTestCase(GOESNCEUMFileHandlerReflectanceTest)) return mysuite From 0cb5aeeebbab120bec923ce4032d6469371420c5 Mon Sep 17 00:00:00 2001 From: Trygve Aspenes Date: Thu, 6 Dec 2018 14:56:52 +0100 Subject: [PATCH 22/24] Stickler --- satpy/tests/reader_tests/test_nc_goes.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_nc_goes.py b/satpy/tests/reader_tests/test_nc_goes.py index 46d3e7999b..3dd9e7986a 100644 --- a/satpy/tests/reader_tests/test_nc_goes.py +++ b/satpy/tests/reader_tests/test_nc_goes.py @@ -394,7 +394,7 @@ def setUp(self, xr_): # Instantiate reader using the mocked open_dataset() method self.reader = GOESEUMNCFileHandler(filename='dummy', filename_info={}, filetype_info={}, geo_data=geo_data) - + def test_get_dataset_radiance(self): for ch in self.channels: if not self.reader._is_vis(ch): @@ -416,6 +416,7 @@ def test_calibrate(self): calibration=calib, channel=ch) target_func.assert_called() + class GOESNCEUMFileHandlerReflectanceTest(unittest.TestCase): longMessage = True From 499a013deced51f5ad4eb9cff28e284948e357c2 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Thu, 6 Dec 2018 15:06:26 +0000 Subject: [PATCH 23/24] Move test_get_sector from baseclass to subclasses --- satpy/tests/reader_tests/test_nc_goes.py | 89 +++++++++++++++--------- 1 file changed, 55 insertions(+), 34 deletions(-) diff --git a/satpy/tests/reader_tests/test_nc_goes.py b/satpy/tests/reader_tests/test_nc_goes.py index 3dd9e7986a..36cd7140f3 100644 --- a/satpy/tests/reader_tests/test_nc_goes.py +++ b/satpy/tests/reader_tests/test_nc_goes.py @@ -26,7 +26,8 @@ class GOESNCBaseFileHandlerTest(unittest.TestCase): @mock.patch('satpy.readers.nc_goes.xr') @mock.patch.multiple('satpy.readers.nc_goes.GOESNCBaseFileHandler', - __abstractmethods__=set()) + __abstractmethods__=set(), + _get_sector=mock.MagicMock()) def setUp(self, xr_): from satpy.readers.nc_goes import CALIB_COEFS, GOESNCBaseFileHandler @@ -188,39 +189,6 @@ def test_calibrate_ir(self): msg='Incorrect conversion from radiance to brightness ' 'temperature in channel {} detector {}'.format(ch, det)) - def test_get_sector(self): - """Test sector identification""" - from satpy.readers.nc_goes import (FULL_DISC, NORTH_HEMIS_EAST, - SOUTH_HEMIS_EAST, NORTH_HEMIS_WEST, - SOUTH_HEMIS_WEST, UNKNOWN_SECTOR) - shapes_vis = { - (10800, 20754): FULL_DISC, - (7286, 13900): NORTH_HEMIS_EAST, - (2301, 13840): SOUTH_HEMIS_EAST, - (5400, 13200): NORTH_HEMIS_WEST, - (4300, 11090): SOUTH_HEMIS_WEST, - (123, 456): UNKNOWN_SECTOR - } - shapes_ir = { - (2700, 5200): FULL_DISC, - (1850, 3450): NORTH_HEMIS_EAST, - (600, 3500): SOUTH_HEMIS_EAST, - (1310, 3300): NORTH_HEMIS_WEST, - (1099, 2800): SOUTH_HEMIS_WEST, - (123, 456): UNKNOWN_SECTOR - } - shapes = shapes_ir.copy() - shapes.update(shapes_vis) - for (nlines, ncols), sector_ref in shapes.items(): - if (nlines, ncols) in 
shapes_vis: - channel = '00_7' - else: - channel = '10_7' - sector = self.reader._get_sector(channel=channel, nlines=nlines, - ncols=ncols) - self.assertEqual(sector, sector_ref, - msg='Incorrect sector identification') - def test_start_time(self): """Test dataset start time stamp""" self.assertEqual(self.reader.start_time, self.time) @@ -356,6 +324,39 @@ def test_calibrate(self): calibration=calib, channel=ch) target_func.assert_called() + def test_get_sector(self): + """Test sector identification""" + from satpy.readers.nc_goes import (FULL_DISC, NORTH_HEMIS_EAST, + SOUTH_HEMIS_EAST, NORTH_HEMIS_WEST, + SOUTH_HEMIS_WEST, UNKNOWN_SECTOR) + shapes_vis = { + (10800, 20754): FULL_DISC, + (7286, 13900): NORTH_HEMIS_EAST, + (2301, 13840): SOUTH_HEMIS_EAST, + (5400, 13200): NORTH_HEMIS_WEST, + (4300, 11090): SOUTH_HEMIS_WEST, + (123, 456): UNKNOWN_SECTOR + } + shapes_ir = { + (2700, 5200): FULL_DISC, + (1850, 3450): NORTH_HEMIS_EAST, + (600, 3500): SOUTH_HEMIS_EAST, + (1310, 3300): NORTH_HEMIS_WEST, + (1099, 2800): SOUTH_HEMIS_WEST, + (123, 456): UNKNOWN_SECTOR + } + shapes = shapes_ir.copy() + shapes.update(shapes_vis) + for (nlines, ncols), sector_ref in shapes.items(): + if (nlines, ncols) in shapes_vis: + channel = '00_7' + else: + channel = '10_7' + sector = self.reader._get_sector(channel=channel, nlines=nlines, + ncols=ncols) + self.assertEqual(sector, sector_ref, + msg='Incorrect sector identification') + class GOESNCEUMFileHandlerRadianceTest(unittest.TestCase): longMessage = True @@ -416,6 +417,26 @@ def test_calibrate(self): calibration=calib, channel=ch) target_func.assert_called() + def test_get_sector(self): + """Test sector identification""" + from satpy.readers.nc_goes import (FULL_DISC, NORTH_HEMIS_EAST, + SOUTH_HEMIS_EAST, NORTH_HEMIS_WEST, + SOUTH_HEMIS_WEST, UNKNOWN_SECTOR) + shapes = { + (2700, 5200): FULL_DISC, + (1850, 3450): NORTH_HEMIS_EAST, + (600, 3500): SOUTH_HEMIS_EAST, + (1310, 3300): NORTH_HEMIS_WEST, + (1099, 2800): SOUTH_HEMIS_WEST, + (123, 456): UNKNOWN_SECTOR + } + for (nlines, ncols), sector_ref in shapes.items(): + for channel in ('00_7', '10_7'): + sector = self.reader._get_sector(channel=channel, nlines=nlines, + ncols=ncols) + self.assertEqual(sector, sector_ref, + msg='Incorrect sector identification') + class GOESNCEUMFileHandlerReflectanceTest(unittest.TestCase): longMessage = True From 4263fa63609be266797786ae0e6b9d4943af87f6 Mon Sep 17 00:00:00 2001 From: Trygve Aspenes Date: Fri, 7 Dec 2018 09:54:21 +0100 Subject: [PATCH 24/24] flake8 --- satpy/tests/reader_tests/test_nc_goes.py | 1 + 1 file changed, 1 insertion(+) diff --git a/satpy/tests/reader_tests/test_nc_goes.py b/satpy/tests/reader_tests/test_nc_goes.py index 36cd7140f3..a41df6a07d 100644 --- a/satpy/tests/reader_tests/test_nc_goes.py +++ b/satpy/tests/reader_tests/test_nc_goes.py @@ -486,6 +486,7 @@ def test_get_dataset_reflectance(self): msg='get_dataset() returns invalid reflectance for ' 'channel {}'.format(ch)) + def suite(): """Test suite for GOES netCDF reader""" loader = unittest.TestLoader()
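Usage note (editorial sketch; the module path follows the test file added in this series, the invocation itself is an assumption): the per-class test suites collected by suite() can be run with the standard unittest runner, for example:

    import unittest

    from satpy.tests.reader_tests import test_nc_goes

    unittest.TextTestRunner(verbosity=2).run(test_nc_goes.suite())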