Skip to content
Merged
Show file tree
Hide file tree
Changes from 8 commits
Commits
Show all changes
23 commits
Select commit Hold shift + click to select a range
3d0760e
FIX fix cloning
dantegd Jan 14, 2025
ca66732
Merge branch 'branch-25.02' into fix-interop-fixes
dantegd Feb 7, 2025
497d181
FIX changes from PR review to not use internal sklearn APIs and mate …
dantegd Feb 14, 2025
dce1539
Merge cuML branch-25.04
dantegd Feb 14, 2025
0d797aa
Merge branch 'branch-25.04' into fix-interop-fixes
dantegd Feb 14, 2025
0ef4895
ENH Keep list of original hyperparams that user passed
dantegd Feb 14, 2025
a00af9d
Merge branch 'fix-interop-fixes' of github.com:dantegd/cuml into fix-…
dantegd Feb 14, 2025
4e70f4c
FIX remove unused imported function
dantegd Feb 14, 2025
de3e234
Check that get_params and cloning work
betatim Feb 14, 2025
f14a14b
Typo fix
betatim Feb 20, 2025
5a02fd0
ENH multiple improvements by using the cpu_model as the reference tru…
dantegd Feb 21, 2025
e0cd0d5
FIX style fixes
dantegd Feb 21, 2025
d206bb8
Merge cuML branch-25.04
dantegd Feb 21, 2025
e70c3fb
Merge branch 'branch-25.04' into fix-interop-fixes
dantegd Feb 21, 2025
3785c4e
DOC correct docstrings
dantegd Feb 21, 2025
fd17f09
Merge branch 'fix-interop-fixes' of github.com:dantegd/cuml into fix-…
dantegd Feb 21, 2025
e7a35a1
Move imports to the top
betatim Feb 21, 2025
742404e
Fix style
betatim Feb 21, 2025
fa41369
FIX multiple fixes from issues raised in PR review
dantegd Feb 24, 2025
4fb4982
FIX skip pytest for cuml.accel output type when accel is not active
dantegd Feb 24, 2025
b2c31b2
FIX small test fixes
dantegd Feb 24, 2025
5a5bf59
Merge cuML branch-25.04
dantegd Feb 24, 2025
8eeb5b7
FIX fix from a bad merge
dantegd Feb 24, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions python/cuml/cuml/cluster/hdbscan/hdbscan.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -771,7 +771,7 @@ class HDBSCAN(UniversalBase, ClusterMixin, CMajorInputTagMixin):
"""
Fit HDBSCAN model from features.
"""

self._all_finite = True
X_m, n_rows, n_cols, self.dtype = \
input_to_cuml_array(X, order='C',
check_dtype=[np.float32],
Expand Down Expand Up @@ -1163,7 +1163,7 @@ class HDBSCAN(UniversalBase, ClusterMixin, CMajorInputTagMixin):
def get_attr_names(self):
attr_names = ['labels_', 'probabilities_', 'cluster_persistence_',
'condensed_tree_', 'single_linkage_tree_',
'outlier_scores_']
'outlier_scores_', '_all_finite']
if self.gen_min_span_tree:
attr_names = attr_names + ['minimum_spanning_tree_']
if self.prediction_data:
Expand Down
35 changes: 28 additions & 7 deletions python/cuml/cuml/experimental/accel/estimator_proxy.py
Original file line number Diff line number Diff line change
Expand Up @@ -207,14 +207,29 @@ def __init__(self, *args, **kwargs):
self._cpu_model_class = (
original_class_a # Store a reference to the original class
)
signature = inspect.signature(self._cpu_model_class.__init__)

# Keep the original passed hyperparameters so that we know what
# to use in get_params, and also can do later conversions if we
# need.
self._cpu_hyperparams_dict = {
name: param.default
for name, param in signature.parameters.items()
if name != "self"
}
self._cpu_hyperparams_dict.update(kwargs)

kwargs, self._gpuaccel = self._hyperparam_translator(**kwargs)
super().__init__(*args, **kwargs)

self._cpu_hyperparams = list(
inspect.signature(
self._cpu_model_class.__init__
).parameters.keys()
)
# _cpu_hyperparams is expected to be a list in the UniversalBase
# methods.
self._cpu_hyperparams = list(self._cpu_hyperparams_dict.keys())

# Importing and building the model here ensures a more uniform
# behavior of when do we import CPU classes.
self.import_cpu_model()
self.build_cpu_model()

def __repr__(self):
    """
    Return the string repr of the equivalent CPU estimator.

    Builds the CPU model (lazily, on first use) and copies the current
    state from the GPU estimator into it, so the repr matches what
    scikit-learn would print for an estimator with the same
    hyperparameters and fitted attributes.

    Returns
    -------
    str
        The repr of the wrapped CPU estimator instance.
    """
    # Sync state into the CPU model first so the repr reflects the
    # estimator's current parameters/attributes rather than a stale copy.
    self.import_cpu_model()
    self.build_cpu_model()
    self.gpu_to_cpu()
    return self._cpu_model.__repr__()

def __str__(self):
    """
    Return the informal string representation of the equivalent CPU
    estimator.

    Builds the CPU model (lazily, on first use) and copies the current
    state from the GPU estimator into it, so the output matches what
    scikit-learn would produce for the same estimator.

    Returns
    -------
    str
        The str of the wrapped CPU estimator instance.
    """
    # Sync state into the CPU model first so the output reflects the
    # estimator's current parameters/attributes rather than a stale copy.
    self.import_cpu_model()
    self.build_cpu_model()
    self.gpu_to_cpu()
    return self._cpu_model.__str__()

def __getstate__(self):
"""
Expand Down
30 changes: 30 additions & 0 deletions python/cuml/cuml/internals/base.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -889,6 +889,12 @@ class UniversalBase(Base):
# that are not in the cuML estimator in the host estimator
if GlobalSettings().accelerator_active or self._experimental_dispatching:

# we don't want to special sklearn dispatch cloning function
# so that cloning works with this class as a regular estimator
# without __sklearn_clone__
if attr == "__sklearn_clone__":
raise ex

self.import_cpu_model()
if hasattr(self._cpu_model_class, attr):

Expand Down Expand Up @@ -988,3 +994,27 @@ class UniversalBase(Base):
estimator.output_mem_type = MemoryType.host

return estimator

def get_params(self, deep=True):
    """
    Get the parameters of this estimator.

    If the accelerator is active (or experimental dispatching is
    enabled), return the hyperparameters originally passed by the user
    for the CPU estimator held by this class; otherwise fall back to
    the regular ``get_params`` of the Base class.

    Parameters
    ----------
    deep : bool, default=True
        Forwarded to ``Base.get_params`` when the accelerator is not
        active. Ignored when the accelerator is active, since the
        stored hyperparameter dict is flat.

    Returns
    -------
    dict
        Mapping of parameter names to their values.
    """
    if GlobalSettings().accelerator_active or self._experimental_dispatching:
        # Return a copy so callers (e.g. sklearn.clone) cannot mutate
        # our internal record of the user-supplied hyperparameters.
        return dict(self._cpu_hyperparams_dict)
    else:
        return super().get_params(deep=deep)

def set_params(self, **params):
    """
    Set the parameters of this estimator.

    When the accelerator is active, the parameters are translated to
    their GPU equivalents before being applied to this estimator, and
    the stored record of CPU hyperparameters is updated as well.
    Otherwise, dispatching to the CPU class after updating only the GPU
    estimator's params would dispatch to an estimator with outdated
    params.

    Parameters
    ----------
    **params : dict
        Estimator parameters to update.

    Returns
    -------
    self
        This estimator, to allow chaining (scikit-learn convention).
    """
    if not params:
        # Nothing to update; mirrors scikit-learn's set_params behavior.
        return self
    # Keep the record of user-supplied CPU hyperparameters in sync so
    # get_params / later CPU dispatch see the new values.
    self._cpu_hyperparams_dict.update(params)
    # Translate CPU hyperparameter names/values to their GPU
    # equivalents, same call shape as in __init__
    # (self._hyperparam_translator(**kwargs)). The previous code called
    # `super._hyperparam_translator(params)`, which raises: `super` here
    # is the builtin type, and the translator expects keyword arguments.
    # NOTE(review): the translated accel flag is currently discarded;
    # consider updating self._gpuaccel as __init__ does — confirm.
    params, _gpuaccel = self._hyperparam_translator(**params)
    # Base.set_params takes keyword arguments; passing the dict
    # positionally (as before) would raise a TypeError.
    super().set_params(**params)
    return self