Commits
42 commits
dcc4bfe
import the unittest test suite for quantities
keewis May 12, 2019
f6925be
make sure no divide by zero occurs
keewis May 12, 2019
ef13531
use asanyarray instead of asarray in as_compatible_data
keewis May 12, 2019
b373ecf
preserve ndarray subclasses with the data accessor
keewis May 12, 2019
a2aced1
now the sel test passes, too, so don't xfail it
keewis May 12, 2019
97683a4
remove the last divide-by-zero possibility
keewis May 12, 2019
2ece12b
add quantities to some of the requirements files
keewis May 12, 2019
7a25fb6
rename the test file to match the name of the original test file
keewis May 12, 2019
4348e0b
remove trailing whitespace
keewis May 12, 2019
dbeaed8
fix a typo
keewis May 13, 2019
f792478
replace the single data fixture with multiple smaller ones
keewis May 13, 2019
b2e3ae2
add a test for combining data arrays
keewis May 13, 2019
453c693
replace the requires_quantities decorator with skipif on module level
keewis May 13, 2019
0d4b543
convert the test methods from the namespace class to functions
keewis May 13, 2019
8beaf76
also check that units on the data itself survive
keewis May 13, 2019
b4d4288
fix the order of imports
keewis May 19, 2019
1ad1d6d
assert in the comparison function instead of asserting the result
keewis May 20, 2019
2b654a5
use data creation helpers instead of data fixtures
keewis May 20, 2019
c52bdf4
add an option to switch on the support for subclasses
keewis May 20, 2019
92e62b3
modify duck_array_ops.asarray to work like asanyarray if enabled
keewis May 20, 2019
280abf3
add a function that uses asanyarray instead of asarray if the option …
keewis May 20, 2019
24d2771
use the new asarray function instead of using options directly
keewis May 20, 2019
2ea846e
explicitly convert matrix objects to ndarrays
keewis May 20, 2019
5a4db0c
wrap the option name and validator lines
keewis May 20, 2019
9809596
add tests to ensure the matrix and MaskedArray classes get converted
keewis May 20, 2019
b4cab61
fix the indentation of a parenthesis
keewis May 20, 2019
6f398e5
fix the line length of a decorator call
keewis May 20, 2019
54522e3
Merge commit 'f172c673' into member-arrays-with-units
keewis Aug 19, 2019
ee15176
black
keewis Aug 19, 2019
3bc5c5c
black2
keewis Aug 19, 2019
c1e513a
Merge commit 'd089df38' into member-arrays-with-units
keewis Aug 19, 2019
5477bca
Merge branch 'master' into member-arrays-with-units
keewis Aug 19, 2019
c787809
move the function deciding between asarray and asanyarray to npcompat
keewis Aug 19, 2019
c653eaa
make sure the original arrays are used as comparison
keewis Aug 19, 2019
5aee870
isort
keewis Aug 19, 2019
c2944c5
allow passing custom arrays to the helper functions
keewis Aug 20, 2019
8e7d7ce
create the test data in the tests to increase the readability
keewis Aug 20, 2019
25f5800
Merge branch 'master' into member-arrays-with-units
keewis Aug 20, 2019
e13e273
black
keewis Aug 21, 2019
a89a1e5
reuse the coordinate dict
keewis Aug 21, 2019
fe6a799
ignore the missing type annotations for quantities
keewis Aug 22, 2019
c4d8512
Merge branch 'master' into member-arrays-with-units
keewis Aug 26, 2019
2 changes: 1 addition & 1 deletion ci/requirements/py36.yml
@@ -35,4 +35,4 @@ dependencies:
- iris>=1.10
- pydap
- lxml

- quantities
1 change: 1 addition & 0 deletions ci/requirements/py37.yml
@@ -32,5 +32,6 @@ dependencies:
- cfgrib>=0.9.2
- lxml
- pydap
- quantities
- pip:
- numbagg
4 changes: 3 additions & 1 deletion xarray/core/duck_array_ops.py
@@ -162,10 +162,12 @@ def trapz(y, x, axis):


def asarray(data):
from .npcompat import _asarray

return (
data
if (isinstance(data, dask_array_type) or hasattr(data, "__array_function__"))
else np.asarray(data)
else _asarray(data)
)


13 changes: 12 additions & 1 deletion xarray/core/npcompat.py
@@ -29,12 +29,14 @@
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import builtins
import operator
from distutils.version import LooseVersion
from typing import Union

import operator
import numpy as np

from .options import OPTIONS

try:
from numpy import isin
except ImportError:
@@ -378,3 +380,12 @@ def __array_function__(self, *args, **kwargs):


IS_NEP18_ACTIVE = _is_nep18_active()


def _asarray(data):
# options get set after import, so this needs to be done in a
# function
if OPTIONS["enable_experimental_ndarray_subclass_support"]:
return np.asanyarray(data)
else:
return np.asarray(data)
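For context (not part of the diff above): `_asarray` simply chooses between `np.asarray`, which coerces any ndarray subclass down to a plain `ndarray`, and `np.asanyarray`, which passes subclass instances through unchanged. A minimal sketch of that difference, using `MaskedArray` as the subclass:

```python
import numpy as np

# np.asarray drops ndarray subclasses, np.asanyarray preserves them.
masked = np.ma.array([1, 2, 3], mask=[0, 1, 0])

print(type(np.asarray(masked)))     # <class 'numpy.ndarray'> -- subclass lost
print(type(np.asanyarray(masked)))  # <class 'numpy.ma.MaskedArray'> -- subclass kept
```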
8 changes: 8 additions & 0 deletions xarray/core/options.py
@@ -8,6 +8,9 @@
CMAP_SEQUENTIAL = "cmap_sequential"
CMAP_DIVERGENT = "cmap_divergent"
KEEP_ATTRS = "keep_attrs"
ENABLE_EXPERIMENTAL_NDARRAY_SUBCLASS_SUPPORT = (
"enable_experimental_ndarray_subclass_support"
)


OPTIONS = {
@@ -19,6 +22,7 @@
CMAP_SEQUENTIAL: "viridis",
CMAP_DIVERGENT: "RdBu_r",
KEEP_ATTRS: "default",
ENABLE_EXPERIMENTAL_NDARRAY_SUBCLASS_SUPPORT: False,
}

_JOIN_OPTIONS = frozenset(["inner", "outer", "left", "right", "exact"])
@@ -35,6 +39,7 @@ def _positive_integer(value):
FILE_CACHE_MAXSIZE: _positive_integer,
WARN_FOR_UNCLOSED_FILES: lambda value: isinstance(value, bool),
KEEP_ATTRS: lambda choice: choice in [True, False, "default"],
ENABLE_EXPERIMENTAL_NDARRAY_SUBCLASS_SUPPORT: lambda value: isinstance(value, bool),
}


@@ -98,6 +103,9 @@ class set_options:
attrs, ``False`` to always discard them, or ``'default'`` to use original
logic that attrs should only be kept in unambiguous circumstances.
Default: ``'default'``.
- ``enable_experimental_ndarray_subclass_support``: whether or not
to enable the support for subclasses of numpy's ndarray.
Default: ``False``.

You can use ``set_options`` either as a context manager:

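The docstring entry above documents the new option; as an illustration (not part of the diff), it is toggled through the existing `set_options` API, either as a context manager or globally:

```python
import xarray as xr

# Illustrative sketch, not code from this PR: enable the experimental
# option for a limited block of code ...
with xr.set_options(enable_experimental_ndarray_subclass_support=True):
    ...  # code that relies on ndarray subclasses being preserved

# ... or globally, for the rest of the session.
xr.set_options(enable_experimental_ndarray_subclass_support=True)
```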
24 changes: 20 additions & 4 deletions xarray/core/variable.py
@@ -18,9 +18,9 @@
VectorizedIndexer,
as_indexable,
)
from .npcompat import IS_NEP18_ACTIVE, _asarray
from .options import _get_keep_attrs
from .pycompat import dask_array_type, integer_types
from .npcompat import IS_NEP18_ACTIVE
from .utils import (
OrderedSet,
decode_numpy_dict_values,
@@ -193,6 +193,8 @@ def as_compatible_data(data, fastpath=False):
data[mask] = fill_value
else:
data = np.asarray(data)
elif isinstance(data, np.matrix):
data = np.asarray(data)

if not isinstance(data, np.ndarray):
if hasattr(data, "__array_function__"):
@@ -208,8 +210,7 @@
)

# validate whether the data is valid data types
data = np.asarray(data)

data = _asarray(data)
if isinstance(data, np.ndarray):
if data.dtype.kind == "O":
data = _possibly_convert_objects(data)
Expand Down Expand Up @@ -244,6 +245,21 @@ def _as_array_or_item(data):
return data


def _as_any_array_or_item(data):
""" Return the given values as a numpy array subclass instance, or
individual item if it's a 0d datetime64 or timedelta64 array.

The same caveats as for ``_as_array_or_item`` apply.
"""
data = _asarray(data)
if data.ndim == 0:
if data.dtype.kind == "M":
data = np.datetime64(data, "ns")
elif data.dtype.kind == "m":
data = np.timedelta64(data, "ns")
return data


class Variable(
common.AbstractArray, arithmetic.SupportsArithmetic, utils.NdimSizeLenMixin
):
@@ -322,7 +338,7 @@ def data(self):
):
return self._data
else:
return self.values
return _as_any_array_or_item(self._data)

@data.setter
def data(self, data):
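Taken together, the `as_compatible_data` and `Variable.data` changes mean that, with the option enabled, an ndarray subclass such as a `quantities` array survives the round trip through a `DataArray`. A sketch of the intended behaviour (illustrative only; assumes `python-quantities` is installed, and mirrors the tests added below):

```python
import numpy as np
import quantities as pq
import xarray as xr

# Illustrative only; mirrors the tests added in test_quantities.py below.
with xr.set_options(enable_experimental_ndarray_subclass_support=True):
    da = xr.DataArray(np.arange(3) * pq.m, dims="x")
    print(type(da.data))  # expected: a quantities array, not a plain ndarray
    print(da.data.units)  # expected: the metre unit survives
```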
160 changes: 160 additions & 0 deletions xarray/tests/test_quantities.py
@@ -0,0 +1,160 @@
import numpy as np
import pytest

from xarray import DataArray, set_options

try:
import quantities as pq

has_quantities = True
except ImportError:
has_quantities = False

pytestmark = pytest.mark.skipif(not has_quantities, reason="requires python-quantities")


set_options(enable_experimental_ndarray_subclass_support=True)


def assert_equal_with_units(a, b):
a = a if not isinstance(a, DataArray) else a.data
b = b if not isinstance(b, DataArray) else b.data

assert (hasattr(a, "units") and hasattr(b, "units")) and a.units == b.units

assert (hasattr(a, "magnitude") and hasattr(b, "magnitude")) and np.allclose(
a.magnitude, b.magnitude
)


def create_data():
return (np.arange(10 * 20).reshape(10, 20) + 1) * pq.V


def create_coord_arrays():
x = (np.arange(10) + 1) * pq.A
y = np.arange(20) + 1
xp = (np.arange(10) + 1) * pq.J
return x, y, xp


def create_coords(x=None, y=None, xp=None):
Collaborator:
You write these in whichever format works best for you; don't take this as a dictum to change. But:

You might find that writing these as pytest fixtures makes the code nicer and easier to write.

Here, you'd

  • add @pytest.fixture to this function,
  • rename it to coords,
  • and then any test function that wants access to its result just takes its name as a parameter, like def test_units_in_data_and_coords(coords):

You can have fixtures taking data from other fixtures, so you can still have the chain you're creating here, and you get parameterization for free!
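A minimal sketch of the fixture approach described above (illustrative only, not code from the PR; it assumes the experimental option is enabled at module level, as this test file already does):

```python
import numpy as np
import pytest
import quantities as pq

from xarray import DataArray


@pytest.fixture
def coords():
    # the create_coords helper, rewritten as a fixture
    x = (np.arange(10) + 1) * pq.A
    y = np.arange(20) + 1
    xp = (np.arange(10) + 1) * pq.J
    return dict(x=x, y=y, xp=(["x"], xp))


@pytest.fixture
def data_array(coords):
    # fixtures can depend on other fixtures, preserving the helper chain
    data = (np.arange(10 * 20).reshape(10, 20) + 1) * pq.V
    return DataArray(data, dims=("x", "y"), coords=coords)


def test_units_in_data_and_coords(data_array):
    # relies on the module-level set_options call enabling subclass support
    assert hasattr(data_array.data, "units")
```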

Collaborator Author:
well, that was what I did before, but when I created this PR, I was told in #2956 (comment) that data fixtures were adding complexity to the tests so I should either create the data in the tests themselves or use helper functions.

Member:
I don't think it particularly matters whether you write fixtures or helper functions. My point was more about trying to keep test-specific inputs and assertions as close to each other as possible, so you can read a test without constantly referring to a helper function that defines the inputs.

Collaborator:
I hadn't seen that, sorry you're caught in the middle there @keewis.

For this case, you can do it how you wish; I think @shoyer's emphasis was on the locality rather than the format, so there's no benefit from create_x over an x fixture.

(then we can all have a separate discussion about locality vs generality without holding this work up)

Collaborator Author:
should I then leave it as is or rewrite it so that the data is created in each individual test? None of these actually need all of data, dimensions and coordinates as subclass instances. Actually, they are almost the same as the ones in the original PR. I would be happy with both, though I do think rewriting may actually make the tests a little bit easier to understand.

Collaborator:
Assuming it's a straight swap between create_X and a fixture X (i.e. no change to the locality), I think they would be better as pytest fixtures. They're acceptable as either, though.

@shoyer what are your thoughts about the case at hand?

Collaborator Author (@keewis, Aug 20, 2019):
What I meant was that at the moment I have a single 2d array subclass as data and both dimensions and an extra coordinate are also subclass instances. The dimensions (and the coordinate) only matter in the tests they are used in, so most of the data arrays used could be in place created 1d arrays.

Collaborator Author:
I modified the tests to create the data in place.

Collaborator:
OK great! Cheers @keewis

x_, y_, xp_ = create_coord_arrays()
if x is None:
x = x_
if y is None:
y = y_
if xp is None:
xp = xp_

coords = dict(x=x, y=y, xp=(["x"], xp))
return coords


def create_data_array(data=None, coords=None):
if data is None:
data = create_data()
if coords is None:
coords = create_coords()

return DataArray(data, dims=("x", "y"), coords=coords)


def with_keys(mapping, keys):
return {key: value for key, value in mapping.items() if key in keys}


def test_without_subclass_support():
with set_options(enable_experimental_ndarray_subclass_support=False):
data_array = create_data_array()
assert not hasattr(data_array.data, "units")


@pytest.mark.filterwarnings("ignore:the matrix subclass:PendingDeprecationWarning")
def test_matrix():
matrix = np.matrix([[1, 2], [3, 4]])
da = DataArray(matrix)

assert not isinstance(da.data, np.matrix)


def test_masked_array():
masked = np.ma.array([[1, 2], [3, 4]], mask=[[0, 1], [1, 0]])
da = DataArray(masked)
assert not isinstance(da.data, np.ma.MaskedArray)


def test_units_in_data_and_coords():
data = create_data()
_, _, xp = create_coord_arrays()
data_array = create_data_array(data=data, coords=create_coords(xp=xp))

assert_equal_with_units(data, data_array)
assert_equal_with_units(xp, data_array.xp)


def test_arithmetics():
v = create_data()
coords = create_coords()
da = create_data_array(data=v, coords=coords)

f = np.arange(10 * 20).reshape(10, 20) * pq.A
g = DataArray(f, dims=["x", "y"], coords=with_keys(coords, ["x", "y"]))
assert_equal_with_units(da * g, v * f)

# swapped dimension order
f = np.arange(20 * 10).reshape(20, 10) * pq.V
g = DataArray(f, dims=["y", "x"], coords=with_keys(coords, ["x", "y"]))
assert_equal_with_units(da + g, v + f.T)

# broadcasting
f = (np.arange(10) + 1) * pq.m
g = DataArray(f, dims=["x"], coords=with_keys(coords, ["x"]))
assert_equal_with_units(da / g, v / f[:, None])


@pytest.mark.xfail(reason="units don't survive through combining yet")
def test_combine():
from xarray import concat

data_array = create_data_array()

a = data_array[:, :10]
b = data_array[:, 10:]

assert_equal_with_units(concat([a, b], dim="y"), data_array)


def test_unit_checking():
coords = create_coords()
da = create_data_array(coords=coords)

f = np.arange(10 * 20).reshape(10, 20) * pq.A
g = DataArray(f, dims=["x", "y"], coords=with_keys(coords, ["x", "y"]))
with pytest.raises(ValueError, match="Unable to convert between units"):
da + g


@pytest.mark.xfail(reason="units in indexes not supported")
def test_units_in_indexes():
""" Test if units survive through xarray indexes.
Indexes are borrowed from Pandas, and Pandas does not support
units. Therefore, we currently don't intend to support units on
indexes either.
"""
x, *_ = create_coord_arrays()
data_array = create_data_array(coords=create_coords(x=x))
assert_equal_with_units(data_array.x, x)


def test_sel():
data = create_data()
_, y, _ = create_coord_arrays()
data_array = create_data_array(data=data, coords=create_coords(y=y))
assert_equal_with_units(data_array.sel(y=y[0]), data[:, 0])


def test_mean():
data = create_data()
data_array = create_data_array(data=data)
assert_equal_with_units(data_array.mean("x"), data.mean(0))