1 change: 1 addition & 0 deletions doc/whats-new.rst
@@ -53,6 +53,7 @@ Enhancements
   "dayofyear" and "dayofweek" accessors (:issue:`2597`). By `Spencer Clark
   <https://github.com/spencerkclark>`_.
 - Support Dask ``HighLevelGraphs`` by `Matthew Rocklin <https://matthewrocklin.com>`_.
+
 - :py:meth:`DataArray.resample` and :py:meth:`Dataset.resample` now support the
   ``loffset`` kwarg just like pandas.
   By `Deepak Cherian <https://github.com/dcherian>`_
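The ``loffset`` entry above refers to pandas-style label offsetting during resampling. A minimal sketch of the intended usage, assuming a time-indexed ``DataArray`` (the data and offset values here are illustrative, not taken from this PR):

```python
import numpy as np
import pandas as pd
import xarray as xr

# Hourly samples over two days (illustrative data).
times = pd.date_range('2000-01-01', periods=48, freq='H')
da = xr.DataArray(np.arange(48), coords=[times], dims='time')

# As in pandas, ``loffset`` shifts the labels of the resampled bins:
# daily means are still computed over calendar days, but each label
# is moved forward by 12 hours (here, to the bin midpoint).
daily = da.resample(time='1D', loffset='12H').mean()
print(daily['time'].values)
```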
13 changes: 9 additions & 4 deletions xarray/backends/api.py
@@ -338,17 +338,22 @@ def maybe_decode_store(store, lock=False):
         else:
             raise ValueError('unrecognized engine for open_dataset: %r'
                              % engine)
-
-        with close_on_error(store):
-            return maybe_decode_store(store)
     else:
         if engine is not None and engine != 'scipy':
             raise ValueError('can only read file-like objects with '
                              "default engine or engine='scipy'")
         # assume filename_or_obj is a file-like object
         store = backends.ScipyDataStore(filename_or_obj)
 
-    return maybe_decode_store(store)
+    with close_on_error(store):
+        ds = maybe_decode_store(store)
+
+    # Ensure source filename always stored in dataset object (GH issue #2550)
+    if 'source' not in ds.encoding.keys():
+        if isinstance(filename_or_obj, basestring):
+            ds.encoding['source'] = filename_or_obj
+
+    return ds
 
 
 def open_dataarray(filename_or_obj, group=None, decode_cf=True,
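The ``api.py`` change above guarantees that ``ds.encoding['source']`` is set whenever a filename (rather than a file-like object or a data store) was passed to ``open_dataset``. A minimal sketch of the resulting behaviour; ``'example.nc'`` is a placeholder path, not from the PR:

```python
import numpy as np
import xarray as xr

# Round-trip a small dataset through a file
# ('example.nc' is a placeholder path).
xr.Dataset({'foo': ('x', np.arange(3))}).to_netcdf('example.nc')

ds = xr.open_dataset('example.nc')
# After this change the path is always recorded in the encoding,
# which the new regression test below also verifies.
print(ds.encoding['source'])  # -> 'example.nc'
```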
11 changes: 11 additions & 0 deletions xarray/tests/test_backends.py
@@ -3356,3 +3356,14 @@ def test_no_warning_from_dask_effective_get():
             ds = Dataset()
             ds.to_netcdf(tmpfile)
         assert len(record) == 0
+
+
+@requires_scipy_or_netCDF4
+def test_source_encoding_always_present():
+    # Test for GH issue #2550.
+    rnddata = np.random.randn(10)
+    original = Dataset({'foo': ('x', rnddata)})
+    with create_tmp_file() as tmp:
+        original.to_netcdf(tmp)
+        with open_dataset(tmp) as ds:
+            assert ds.encoding['source'] == tmp