|
1 | 1 | """Tests for the H-SAF NC reader.""" |
2 | 2 | import datetime as dt |
| 3 | +import io |
3 | 4 | import os |
| 5 | +from collections import namedtuple |
4 | 6 | from unittest import mock |
5 | 7 |
|
6 | 8 | import numpy as np |
|
35 | 37 | } |
36 | 38 | } |
37 | 39 |
|
| 40 | +# Avoid too many arguments for test_load_datasets |
| 41 | +LoadDatasetsParams = namedtuple( |
| 42 | + "LoadDatasetsParams", |
| 43 | + ["file_type", "loadable_ids", "unit", "resolution", "area_name"] |
| 44 | +) |
| 45 | + |
38 | 46 | # constants for fake test data |
39 | 47 | DEFAULT_SHAPE = (5, 5) |
40 | 48 | rng = np.random.default_rng() |
@@ -78,48 +86,55 @@ def setup_method(self): |
78 | 86 | ) |
79 | 87 | def test_reader_creation(self, file_type, expected_loadables): |
80 | 88 | """Test that the reader can create file handlers.""" |
81 | | - with mock.patch("satpy.readers.hsaf_nc.xr.open_dataset") as od: |
82 | | - od.side_effect = fake_hsaf_dataset |
| 89 | + with mock.patch("satpy.readers.hsaf_nc.generic_open") as mock_generic_open, \ |
| 90 | + mock.patch("satpy.readers.hsaf_nc.xr.open_dataset") as mock_open_dataset: |
| 91 | + |
| 92 | + mock_generic_open.return_value.__enter__.return_value = io.BytesIO(b"fake data") |
| 93 | + mock_open_dataset.side_effect = fake_hsaf_dataset |
83 | 94 |
|
84 | 95 | loadables = file_type["reader"].select_files_from_pathnames([file_type["fake_file"]]) |
85 | | - file_type["reader"].create_filehandlers(loadables) |
| 96 | + file_handlers = file_type["reader"].create_filehandlers(loadables) |
86 | 97 |
|
87 | | - assert len(loadables) == expected_loadables |
| 98 | + assert len(file_handlers) == expected_loadables |
88 | 99 | assert file_type["reader"].file_handlers, "No file handlers created" |
89 | 100 |
|
90 | 101 | @pytest.mark.parametrize( |
91 | | - ("file_type", "loadable_ids", "unit", "resolution", "area_name"), |
| 102 | + "params", |
92 | 103 | [ |
93 | | - (FILE_PARAMS[FILE_TYPE_H60], ["rr", "qind"], "mm/h", 3000, "msg_seviri_fes_3km"), |
94 | | - (FILE_PARAMS[FILE_TYPE_H63], ["rr", "qind"], "mm/h", 3000, "msg_seviri_iodc_3km"), |
95 | | - (FILE_PARAMS[FILE_TYPE_H90], ["acc_rr", "qind"], "mm", 3000, "msg_seviri_iodc_3km"), |
| 104 | + LoadDatasetsParams(FILE_PARAMS[FILE_TYPE_H60], ["rr", "qind"], "mm/h", 3000, "msg_seviri_fes_3km"), |
| 105 | + LoadDatasetsParams(FILE_PARAMS[FILE_TYPE_H63], ["rr", "qind"], "mm/h", 3000, "msg_seviri_iodc_3km"), |
| 106 | + LoadDatasetsParams(FILE_PARAMS[FILE_TYPE_H90], ["acc_rr", "qind"], "mm", 3000, "msg_seviri_iodc_3km"), |
96 | 107 | ], |
97 | 108 | ) |
98 | | - def test_load_datasets(self, file_type, loadable_ids, unit, resolution, area_name): |
| 109 | + def test_load_datasets(self, params): |
99 | 110 | """Test that datasets can be loaded correctly.""" |
100 | | - with mock.patch("satpy.readers.hsaf_nc.xr.open_dataset") as od: |
101 | | - od.side_effect = fake_hsaf_dataset |
102 | | - loadables = file_type["reader"].select_files_from_pathnames([file_type["fake_file"]]) |
103 | | - file_type["reader"].create_filehandlers(loadables) |
| 111 | + with mock.patch("satpy.readers.hsaf_nc.generic_open") as mock_generic_open, \ |
| 112 | + mock.patch("satpy.readers.hsaf_nc.xr.open_dataset") as mock_open_dataset: |
104 | 113 |
|
105 | | - datasets = file_type["reader"].load(loadable_ids) |
| 114 | + mock_generic_open.return_value.__enter__.return_value = io.BytesIO(b"fake data") |
| 115 | + mock_open_dataset.side_effect = fake_hsaf_dataset |
| 116 | + |
| 117 | + loadables = params.file_type["reader"].select_files_from_pathnames([params.file_type["fake_file"]]) |
| 118 | + params.file_type["reader"].create_filehandlers(loadables) |
| 119 | + |
| 120 | + datasets = params.file_type["reader"].load(params.loadable_ids) |
106 | 121 | dataset_names = {d["name"] for d in datasets.keys()} |
107 | | - assert dataset_names == set(loadable_ids) |
| 122 | + assert dataset_names == set(params.loadable_ids) |
108 | 123 |
|
109 | 124 | # check array shapes and types |
110 | | - assert datasets[loadable_ids[0]].shape == DEFAULT_SHAPE |
111 | | - assert datasets[loadable_ids[1]].shape == DEFAULT_SHAPE |
112 | | - assert np.issubdtype(datasets[loadable_ids[0]].dtype, np.floating) |
113 | | - assert np.issubdtype(datasets[loadable_ids[1]].dtype, np.integer) |
| 125 | + assert datasets[params.loadable_ids[0]].shape == DEFAULT_SHAPE |
| 126 | + assert datasets[params.loadable_ids[1]].shape == DEFAULT_SHAPE |
| 127 | + assert np.issubdtype(datasets[params.loadable_ids[0]].dtype, np.floating) |
| 128 | + assert np.issubdtype(datasets[params.loadable_ids[1]].dtype, np.integer) |
114 | 129 |
|
115 | | - data = datasets[loadable_ids[0]] |
| 130 | + data = datasets[params.loadable_ids[0]] |
116 | 131 | assert data.attrs["spacecraft_name"] == "Meteosat-8" |
117 | 132 | assert data.attrs["platform_name"] == "Meteosat-8" |
118 | | - assert data.attrs["units"] == unit |
119 | | - assert data.attrs["resolution"] == resolution |
| 133 | + assert data.attrs["units"] == params.unit |
| 134 | + assert data.attrs["resolution"] == params.resolution |
120 | 135 | assert data.attrs["start_time"] == dt.datetime(2025, 11, 5, 0, 0) |
121 | 136 | assert data.attrs["end_time"] == dt.datetime(2025, 11, 5, 0, 15) |
122 | | - assert data.attrs["area"].area_id == area_name |
| 137 | + assert data.attrs["area"].area_id == params.area_name |
123 | 138 | assert data.dims == ("y", "x") |
124 | 139 |
|
125 | 140 |
|
@@ -152,8 +167,12 @@ def test_real_hsaf_file(self, file_type, loadable_ids): |
152 | 167 |
|
153 | 168 | def test_get_area_def(self): |
154 | 169 | """Test that the loaded dataset has an AreaDefinition and that overwriting lon_0 of the area works correctly."""
155 | | - with mock.patch("satpy.readers.hsaf_nc.xr.open_dataset") as od: |
156 | | - od.side_effect = fake_hsaf_dataset |
| 170 | + with mock.patch("satpy.readers.hsaf_nc.generic_open") as mock_generic_open, \ |
| 171 | + mock.patch("satpy.readers.hsaf_nc.xr.open_dataset") as mock_open_dataset: |
| 172 | + |
| 173 | + mock_generic_open.return_value.__enter__.return_value = io.BytesIO(b"fake data") |
| 174 | + mock_open_dataset.side_effect = fake_hsaf_dataset |
| 175 | + |
157 | 176 | file_type = FILE_PARAMS[FILE_TYPE_H63] |
158 | 177 | loadables = file_type["reader"].select_files_from_pathnames([file_type["fake_file"]]) |
159 | 178 | file_type["reader"].create_filehandlers(loadables) |
|
0 commit comments