22 | 22 | assert_identical, |
23 | 23 | raises_regex, |
24 | 24 | ) |
| 25 | +from .test_backends import create_tmp_file |
25 | 26 |
26 | 27 | dask = pytest.importorskip("dask") |
27 | 28 | da = pytest.importorskip("dask.array") |
@@ -1135,3 +1136,57 @@ def test_make_meta(map_ds): |
1135 | 1136 | for variable in map_ds.data_vars: |
1136 | 1137 | assert variable in meta.data_vars |
1137 | 1138 | assert meta.data_vars[variable].shape == (0,) * meta.data_vars[variable].ndim |
| 1139 | + |
| 1140 | + |
| 1141 | +@pytest.mark.parametrize("obj", [make_da(), make_ds()]) |
| 1142 | +@pytest.mark.parametrize( |
| 1143 | + "transform", |
| 1144 | + [ |
| 1145 | + lambda x: x.reset_coords(), |
| 1146 | + lambda x: x.reset_coords(drop=True), |
| 1147 | + lambda x: x.isel(x=1), |
| 1148 | + lambda x: x.attrs.update(new_attrs=1), |
| 1149 | + lambda x: x.assign_coords(cxy=1), |
| 1150 | + lambda x: x.rename({"x": "xnew"}), |
| 1151 | + lambda x: x.rename({"cxy": "cxynew"}), |
| 1152 | + ], |
| 1153 | +) |
| 1154 | +def test_normalize_token_not_identical(obj, transform): |
| 1155 | + with raise_if_dask_computes(): |
| 1156 | + assert not dask.base.tokenize(obj) == dask.base.tokenize(transform(obj)) |
| 1157 | + assert not dask.base.tokenize(obj.compute()) == dask.base.tokenize( |
| 1158 | + transform(obj.compute()) |
| 1159 | + ) |
| 1160 | + |
| 1161 | + |
| 1162 | +@pytest.mark.parametrize("transform", [lambda x: x, lambda x: x.compute()]) |
| 1163 | +def test_normalize_differently_when_data_changes(transform): |
| 1164 | + obj = transform(make_ds()) |
| 1165 | + new = obj.copy(deep=True) |
| 1166 | + new["a"] *= 2 |
| 1167 | + with raise_if_dask_computes(): |
| 1168 | + assert not dask.base.tokenize(obj) == dask.base.tokenize(new) |
| 1169 | + |
| 1170 | + obj = transform(make_da()) |
| 1171 | + new = obj.copy(deep=True) |
| 1172 | + new *= 2 |
| 1173 | + with raise_if_dask_computes(): |
| 1174 | + assert not dask.base.tokenize(obj) == dask.base.tokenize(new) |
| 1175 | + |
| 1176 | + |
| 1177 | +@pytest.mark.parametrize( |
| 1178 | + "transform", [lambda x: x, lambda x: x.copy(), lambda x: x.copy(deep=True)] |
| 1179 | +) |
| 1180 | +@pytest.mark.parametrize( |
| 1181 | + "obj", [make_da(), make_ds(), make_da().indexes["x"], make_ds().variables["a"]] |
| 1182 | +) |
| 1183 | +def test_normalize_token_identical(obj, transform): |
| 1184 | + with raise_if_dask_computes(): |
| 1185 | + assert dask.base.tokenize(obj) == dask.base.tokenize(transform(obj)) |
| 1186 | + |
| 1187 | + |
| 1188 | +def test_normalize_token_netcdf_backend(map_ds): |
| 1189 | + with create_tmp_file() as tmp_file: |
| 1190 | + map_ds.to_netcdf(tmp_file) |
| 1191 | + read = xr.open_dataset(tmp_file) |
| 1192 | + assert not dask.base.tokenize(map_ds) == dask.base.tokenize(read) |
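
The tests added above exercise `dask.base.tokenize`, dask's public deterministic hashing function: identical inputs must yield identical tokens, and any change to the underlying data or metadata should change the token. A minimal sketch of that contract on plain NumPy data (illustrative values only, not taken from the diff):

```python
from dask.base import tokenize
import numpy as np

a = np.arange(4)

# Identical contents hash to the same token...
assert tokenize(a) == tokenize(a.copy())
# ...while changing the data changes the token.
assert tokenize(a) != tokenize(a * 2)
# Tokens are plain strings, cheap to compare and to use as cache keys.
print(tokenize(a))
```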
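
`raise_if_dask_computes()` is assumed here to come from xarray's test utilities and to raise if any dask graph is actually executed inside the block, which is what lets these tests assert that tokenization works from graph metadata alone. A rough, hypothetical approximation of such a guard (a sketch, not xarray's implementation):

```python
import contextlib

import dask
import dask.array as da


@contextlib.contextmanager
def fail_on_compute():
    # Hypothetical stand-in for raise_if_dask_computes: install a scheduler
    # that refuses to run any task, so any compute() inside the block fails.
    def scheduler(dsk, keys, **kwargs):
        raise AssertionError("a dask graph was computed inside the guarded block")

    with dask.config.set(scheduler=scheduler):
        yield


x = da.ones((4, 4), chunks=2)
with fail_on_compute():
    dask.base.tokenize(x)  # hashing graph metadata must not trigger computation
```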