Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
26 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
45 changes: 42 additions & 3 deletions .github/workflows/test.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -67,9 +67,20 @@ jobs:
MATRIX=$(jq -nsc '{
"os": ["ubuntu-latest", "macos-latest", "windows-latest"],
"python-version": ["3.9", "3.10", "3.11", "3.12"],
"numpy-version": ["1"],
"exclude": [{
"os": "windows-latest",
"python-version": "3.9"
}],
"include": [{
"os": "ubuntu-latest",
"python-version": "3.12",
"numpy-version": "2"
},
{
"os": "macos-latest",
"python-version": "3.12",
"numpy-version": "2"
}]
}')
# Won't solve on Windows + Python 3.9
Expand All @@ -80,9 +91,20 @@ jobs:
MATRIX=$(jq -nsc '{
"os": ["ubuntu-latest", "macos-latest", "windows-latest"],
"python-version": ["3.9", "3.10", "3.11", "3.12"],
"numpy-version": ["1"],
"exclude": [{
"os": "windows-latest",
"python-version": "3.9"
}],
"include": [{
"os": "ubuntu-latest",
"python-version": "3.12",
"numpy-version": "2"
},
{
"os": "macos-latest",
"python-version": "3.12",
"numpy-version": "2"
}]
}')
# Won't solve on Windows + Python 3.9
Expand All @@ -92,12 +114,13 @@ jobs:
run: |
MATRIX=$(jq -nsc '{
"os": ["ubuntu-latest"],
"numpy-version": ["1"],
"python-version": ["3.11"]
}')
echo "MATRIX=$MATRIX" >> $GITHUB_ENV

test_suite:
name: Tests on ${{ matrix.os }} with Python ${{ matrix.python-version }}
name: Tests on ${{ matrix.os }} with Python ${{ matrix.python-version }}, numpy ${{ matrix.numpy-version }}
needs: [pre_commit, setup]
runs-on: ${{ matrix.os }}
strategy:
Expand All @@ -110,16 +133,32 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- name: Set channels and envs
run: |
if [[ ${{ matrix.numpy-version }} == "2" ]]; then
channels="pyviz/label/dev,conda-forge/label/numpy_rc,numba/label/dev,conda-forge,nodefaults"
envs="-o numpy2"
else
channels="pyviz/label/dev,numba,conda-forge,nodefaults"
envs="-o tests -o examples"
fi
echo "CHANNELS=$channels" >> $GITHUB_ENV
echo "ENVS=$envs" >> $GITHUB_ENV
- uses: holoviz-dev/holoviz_tasks/install@v0
with:
name: unit_test_suite
python-version: ${{ matrix.python-version }}
channel-priority: flexible
channels: pyviz/label/dev,numba,conda-forge,nodefaults
envs: "-o tests -o examples"
channels: ${{ env.CHANNELS }}
envs: ${{ env.ENVS }}
cache: ${{ github.event.inputs.cache || github.event.inputs.cache == '' }}
conda-update: true
id: install
- name: check version
run: |
conda activate test-environment
python -c "import numba; print('Numba', numba.__version__)"
python -c "import numpy; print('Numpy', numpy.__version__)"
- name: doit test_lint
if: runner.os == 'Linux'
run: |
Expand Down
4 changes: 2 additions & 2 deletions datashader/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -845,10 +845,10 @@ def quadmesh(self, source, x=None, y=None, agg=None):
xaxis_linear = self.x_axis is _axis_lookup["linear"]
yaxis_linear = self.y_axis is _axis_lookup["linear"]
even_yspacing = np.allclose(
yarr, np.linspace(yarr[0], yarr[-1], len(yarr))
yarr, np.linspace(yarr[0].data, yarr[-1].data, len(yarr))
)
even_xspacing = np.allclose(
xarr, np.linspace(xarr[0], xarr[-1], len(xarr))
xarr, np.linspace(xarr[0].data, xarr[-1].data, len(xarr))
)

if xaxis_linear and yaxis_linear and even_xspacing and even_yspacing:
Expand Down
2 changes: 1 addition & 1 deletion datashader/datashape/coretypes.py
Original file line number Diff line number Diff line change
Expand Up @@ -761,7 +761,7 @@ def from_numpy_dtype(self, dt):
ctype("int64")
>>> CType.from_numpy_dtype(dtype('M8'))
DateTime(tz=None)
>>> CType.from_numpy_dtype(dtype('U30'))
>>> CType.from_numpy_dtype(dtype('U30')) # doctest: +SKIP
ctype("string[30, 'U32']")
"""
try:
Expand Down
2 changes: 1 addition & 1 deletion datashader/datashape/discovery.py
Original file line number Diff line number Diff line change
Expand Up @@ -150,7 +150,7 @@ def deltaparse(x):
Examples
--------
>>> td = '1 day'
>>> deltaparse(td)
>>> deltaparse(td) # doctest: +SKIP
numpy.timedelta64(1,'D')
>>> deltaparse('1.2 days') # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
Expand Down
13 changes: 8 additions & 5 deletions datashader/datatypes.py
Original file line number Diff line number Diff line change
Expand Up @@ -606,8 +606,8 @@ def _concat_same_type(cls, to_concat):

# offset and concat start_indices
offsets = np.hstack([
[0],
np.cumsum([len(ra.flat_array) for ra in to_concat[:-1]])])
[0], np.cumsum([len(ra.flat_array) for ra in to_concat[:-1]])
]).astype('uint64')

start_indices = np.hstack([ra.start_indices + offset
for offset, ra in zip(offsets, to_concat)])
Expand Down Expand Up @@ -636,7 +636,7 @@ def astype(self, dtype, copy=True):
return dtype.construct_array_type()._from_sequence(
np.asarray(self))

return np.array([v for v in self], dtype=dtype, copy=copy)
return np.array([v for v in self], dtype=dtype)

def tolist(self):
# Based on pandas ExtensionArray.tolist
Expand All @@ -645,9 +645,12 @@ def tolist(self):
else:
return list(self)

def __array__(self, dtype=None, copy=True):
    # NumPy array-protocol hook: convert this ragged extension array to a
    # plain ndarray. With no dtype we fall back to object dtype, since the
    # per-row ragged values cannot form a regular homogeneous array.
    # NOTE(review): NumPy 2 passes ``copy`` to ``__array__``; per the NumPy 2
    # migration guidance the parameter should default to ``None`` and
    # ``copy=False`` should raise if a copy cannot be avoided — confirm
    # intended semantics here.
    dtype = np.dtype(object) if dtype is None else np.dtype(dtype)
    if copy:
        # Explicit copy of the materialized per-row values.
        return np.array(self.tolist(), dtype=dtype)
    else:
        # NOTE(review): ``np.array`` copies by default, so this branch still
        # copies (and re-enters ``__array__`` with copy=True) — the
        # ``copy=False`` request is not actually honored. Verify this is
        # deliberate.
        return np.array(self, dtype=dtype)

def duplicated(self, *args, **kwargs):
msg = "duplicated is not implemented for RaggedArray"
Expand Down
2 changes: 1 addition & 1 deletion datashader/transfer_functions/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -184,7 +184,7 @@ def eq_hist(data, mask=None, nbins=256*256):
# Run more accurate value counting if data is of boolean or integer type
# and unique value array is smaller than nbins.
if data2.dtype == bool or (array_module.issubdtype(data2.dtype, array_module.integer) and
data2.ptp() < nbins):
array_module.ptp(data2) < nbins):
values, counts = array_module.unique(data2, return_counts=True)
vmin, vmax = values[0].item(), values[-1].item() # Convert from arrays to scalars.
interval = vmax-vmin
Expand Down
4 changes: 3 additions & 1 deletion datashader/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -520,7 +520,9 @@ def _pd_mesh(vertices, simplices):
winding = [0, 1, 2]
first_tri = vertices.values[simplices.values[0, winding].astype(np.int64), :2]
a, b, c = first_tri
if np.cross(b-a, c-a).item() >= 0:
p1, p2 = b - a, c - a
cross_product = p1[0] * p2[1] - p1[1] * p2[0]
if cross_product >= 0:
winding = [0, 2, 1]

# Construct mesh by indexing into vertices with simplex indices
Expand Down
13 changes: 13 additions & 0 deletions examples/conftest.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,20 @@
from importlib.util import find_spec

import platform

# Globs of example notebooks pytest should skip entirely when the optional
# dependency they exercise is not installed in the test environment.
collect_ignore_glob = []

# The Geopandas user guide requires the optional geopandas package.
if find_spec("geopandas") is None:
    collect_ignore_glob += [
        "user_guide/13_Geopandas.ipynb",
    ]

# These notebooks require the optional spatialpandas package.
if find_spec("spatialpandas") is None:
    collect_ignore_glob += [
        "user_guide/7_Networks.ipynb",
        "user_guide/8_Polygons.ipynb",
    ]

# 2023-07-21 with following error:
# nbclient.exceptions.CellTimeoutError: A cell timed out while it was being executed, after 300 seconds.
# Here is a preview of the cell contents:
Expand Down
39 changes: 39 additions & 0 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,44 @@
'spatialpandas',
]

# NumPy 2 test-environment pins. This list mirrors the [examples] and [tests]
# extras with the packages that do not yet support NumPy 2 commented out; it
# should be removed once all commented-out packages work with NumPy 2.
numpy2 = [
    'numba ==0.60.0rc1',
    'numpy ==2.0.0rc2',

    # [geopandas]
    # 'dask-geopandas',
    # 'geopandas',
    # 'shapely >=2.0.0',

    # [examples]
    'bokeh >3.1',
    'geodatasets',
    'holoviews',
    'matplotlib >=3.3',
    'panel >1.1',
    # 'scikit-image',
    # 'spatialpandas',

    # [tests]
    'codecov',
    'geodatasets',
    'flake8',
    'nbconvert',
    'nbformat',
    'nbsmoke[verify] >0.5',
    'netcdf4',
    # 'pyarrow',
    'pytest <8',  # pytest 8 fails lint: "IPynbFile is deprecated"
    'pytest-benchmark',
    'pytest-cov',
    # 'rasterio',
    # 'rioxarray',  # needs rasterio
    # 'scikit-image',
    # 'spatialpandas',
    # 'dask-expr',  # needs pyarrow
]

extras_require = {
'tests': geopandas + [
'codecov',
Expand Down Expand Up @@ -71,6 +109,7 @@
'rasterio',
],
'geopandas': geopandas,
'numpy2': numpy2,
}


Expand Down