55 changes: 55 additions & 0 deletions README.md
@@ -22,6 +22,61 @@ You can install the `museval` parsing package using pip:
pip install museval
```

### GPU Acceleration (Optional)

`museval` now supports optional GPU acceleration using NVIDIA CUDA. This can significantly speed up evaluation for large datasets or long audio files.

#### Requirements
- NVIDIA GPU with CUDA support
- CUDA Toolkit installed (version 11.x or 12.x)

#### Installation with GPU support

```bash
# For CUDA 11.x
pip install museval[gpu]

# For CUDA 12.x, install CuPy manually first:
pip install cupy-cuda12x
pip install museval
```
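After installing, a quick way to confirm that CuPy can see a CUDA device (this check is independent of `museval` itself) is:

```python
# Quick sanity check that CuPy can see a CUDA device
import cupy as cp

print(cp.cuda.is_available())  # True means the 'cupy' backend can be used
```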

#### Usage with GPU

GPU acceleration can be enabled in several ways:

**1. Via function parameter:**
```python
import museval

sdr, isr, sir, sar, perm = museval.metrics.bss_eval(
    reference_sources,
    estimated_sources,
    backend='cupy'  # Use GPU acceleration
)
```

**2. Via environment variable:**
```bash
export MUSEVAL_BACKEND=cupy
python your_evaluation_script.py
```

**3. Auto mode (default):**
By default, `backend='auto'` checks the `MUSEVAL_BACKEND` environment variable first; if it is unset, `museval` uses the GPU (CuPy) when one is available and otherwise falls back to NumPy (CPU). Set the environment variable to pin a backend globally.
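As a minimal sketch, the GPU can also be enabled globally from within Python before calling the metrics; the random arrays below only illustrate the `(nsrc, nsampl, nchan)` shape convention:

```python
import os

os.environ["MUSEVAL_BACKEND"] = "cupy"  # same effect as `export MUSEVAL_BACKEND=cupy`

import numpy as np
import museval.metrics as metrics

# Illustrative data with shape (nsrc, nsampl, nchan)
reference = np.random.randn(2, 44100, 2)
estimated = reference + 0.1 * np.random.randn(2, 44100, 2)

# backend defaults to 'auto', so the environment variable selects the GPU here
sdr, isr, sir, sar, perm = metrics.bss_eval(reference, estimated)
```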

#### Performance Considerations

GPU acceleration is most beneficial for:
- Large number of sources (4+)
- Long audio files (> 30 seconds)
- Batch processing multiple tracks
- Framewise evaluation with many windows

For smaller inputs, CPU may be faster due to data transfer overhead.

If GPU is not available or CuPy is not installed, `museval` will automatically fall back to CPU computation with a warning.
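The fallback is easy to observe in a minimal sketch: requesting `backend='cupy'` on a machine without CuPy still computes the metrics on the CPU and emits a `UserWarning`:

```python
import warnings

import numpy as np
import museval.metrics as metrics

reference = np.random.randn(2, 44100, 2)
estimated = reference + 0.1 * np.random.randn(2, 44100, 2)

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    sdr, isr, sir, sar, perm = metrics.bss_eval(reference, estimated, backend='cupy')

for w in caught:
    print(w.message)  # e.g. "CuPy not available (...). Falling back to NumPy."
```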

## Usage

The purpose of this package is to evaluate source separation results and write out validated `json` files. We encourage users to adopt this output format as the standardized way to share source separation results. `museval` is designed to work in conjunction with the [musdb](https://github.com/sigsep/sigsep-mus-db) tools and the MUSDB18 dataset (although `museval` can also be used without `musdb`).
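For example, a minimal sketch of evaluating a single MUSDB18 track with `musdb` (using the mixture itself as a naive estimate, purely to exercise the API) might look like:

```python
import musdb
import museval

mus = musdb.DB(download=True)   # 7-second preview version of MUSDB18
track = mus.tracks[0]

# Naive "estimates": reuse the mixture for both targets, just to exercise the API
estimates = {
    "vocals": track.audio,
    "accompaniment": track.audio,
}

scores = museval.eval_mus_track(track, estimates, output_dir="eval_output")
print(scores)  # aggregated metrics; a validated json file is written to eval_output/
```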
140 changes: 140 additions & 0 deletions examples/gpu_example.py
@@ -0,0 +1,140 @@
"""Example demonstrating GPU acceleration for BSS evaluation.

This script shows how to use the GPU backend for faster evaluation
of source separation metrics.
"""
import time

import numpy as np

import museval.metrics as metrics


def check_gpu_available():
"""Check if GPU is available."""
try:
import cupy as cp
return cp.cuda.is_available()
except (ImportError, RuntimeError):
return False


def generate_test_data(nsrc=4, duration_sec=10, sample_rate=44100, nchan=2):
"""Generate random test data for evaluation."""
nsampl = duration_sec * sample_rate

# Generate random sources with some structure
np.random.seed(42)
reference = np.random.randn(nsrc, nsampl, nchan) * 0.5

# Generate estimates that are somewhat correlated with reference
estimated = np.random.randn(nsrc, nsampl, nchan) * 0.5
estimated += reference * 0.3 # Add correlation

return reference, estimated


def evaluate_with_timing(reference, estimated, backend='numpy', description=''):
"""Evaluate and time the computation."""
print(f"\n{description}")
print("-" * 60)

start = time.time()
sdr, isr, sir, sar, perm = metrics.bss_eval(
reference, estimated,
window=np.inf,
hop=np.inf,
compute_permutation=False,
backend=backend
)
elapsed = time.time() - start

print(f"Time: {elapsed:.3f} seconds")
print(f"SDR: {sdr.mean():.2f} dB")
print(f"ISR: {isr.mean():.2f} dB")
print(f"SIR: {sir.mean():.2f} dB")
print(f"SAR: {sar.mean():.2f} dB")

return elapsed


def main():
"""Run the example."""
print("=" * 60)
print("GPU Acceleration Example for museval")
print("=" * 60)

has_gpu = check_gpu_available()

if has_gpu:
print("\n✓ GPU detected and available")
else:
print("\n⚠️ GPU not available")
print("Install CuPy to enable GPU support:")
print(" pip install museval[gpu]")
print("\nContinuing with CPU-only demonstration...")

# Generate test data
print("\nGenerating test data (4 sources, 10 seconds, stereo)...")
reference, estimated = generate_test_data(
nsrc=4, duration_sec=10, sample_rate=44100, nchan=2
)

# Evaluate with CPU
cpu_time = evaluate_with_timing(
reference, estimated,
backend='numpy',
description='Evaluating with CPU (NumPy backend)'
)

# Evaluate with GPU if available
if has_gpu:
# Warmup run
print("\nWarming up GPU...")
_ = metrics.bss_eval(
reference[:, :1000, :], estimated[:, :1000, :],
backend='cupy'
)

# Actual evaluation
gpu_time = evaluate_with_timing(
reference, estimated,
backend='cupy',
description='Evaluating with GPU (CuPy backend)'
)

# Show speedup
speedup = cpu_time / gpu_time
print("\n" + "=" * 60)
print(f"Speedup: {speedup:.2f}x")
if speedup > 1:
print("✓ GPU is faster!")
else:
print("⚠️ CPU is faster for this workload")
print(" Try larger inputs for better GPU utilization")
print("=" * 60)

# Demonstrate environment variable usage
print("\n" + "=" * 60)
print("Alternative: Using environment variable")
print("=" * 60)
print("\nYou can set the backend globally using:")
print(" export MUSEVAL_BACKEND=cupy")
print("\nThen in your code:")
print(" sdr, isr, sir, sar, perm = metrics.bss_eval(")
print(" reference, estimated")
print(" )")
print("\nThe backend will automatically use GPU if available!")

# Demonstrate wrapper functions
print("\n" + "=" * 60)
print("Using wrapper functions")
print("=" * 60)
print("\nAll wrapper functions support the backend parameter:")
print(" - bss_eval_sources(reference, estimated, backend='cupy')")
print(" - bss_eval_images(reference, estimated, backend='cupy')")
print(" - bss_eval_sources_framewise(..., backend='cupy')")
print(" - bss_eval_images_framewise(..., backend='cupy')")


if __name__ == '__main__':
main()

99 changes: 99 additions & 0 deletions museval/backends/__init__.py
@@ -0,0 +1,99 @@
"""Backend abstraction layer for CPU and GPU computation.

This module provides a unified interface for switching between NumPy (CPU)
and CuPy (GPU) backends for BSS evaluation metrics computation.
"""

import os
import warnings


def _detect_best_backend():
"""Detect the best available backend (GPU first, then CPU).

Returns
-------
str
'cupy' if GPU is available, otherwise 'numpy'.
"""
try:
import cupy as cp

if cp.cuda.is_available():
return "cupy"
except (ImportError, RuntimeError):
pass
return "numpy"


def get_backend(backend="auto"):
"""Select and return the appropriate computation backend.

Parameters
----------
backend : str, optional
Backend to use: 'numpy' (CPU), 'cupy' (GPU), or 'auto'.
Default is 'auto', which attempts to use GPU if available,
otherwise falls back to CPU. The MUSEVAL_BACKEND environment
variable can override this behavior.

Returns
-------
backend : Backend
An instance of the selected backend implementing the Backend interface.

Raises
------
ValueError
If an unknown backend name is provided.

Examples
--------
>>> backend = get_backend('numpy')
>>> arr = backend.zeros((10, 10))
>>> backend = get_backend('cupy') # Requires CuPy installation
>>> backend = get_backend('auto') # Uses GPU if available, else CPU
"""
if backend == "auto":
# Check environment variable first
env_backend = os.environ.get("MUSEVAL_BACKEND", None)
if env_backend is not None:
backend = env_backend
else:
# Auto-detect: try GPU first, fall back to CPU
backend = _detect_best_backend()

backend = backend.lower()

if backend == "numpy":
from .numpy_backend import NumpyBackend

return NumpyBackend()
elif backend == "cupy":
try:
from .cupy_backend import CupyBackend

return CupyBackend()
except ImportError as e:
warnings.warn(
f"CuPy not available ({e}). Install with: "
"pip install museval[gpu]. Falling back to NumPy.",
UserWarning,
)
from .numpy_backend import NumpyBackend

return NumpyBackend()
except RuntimeError as e:
warnings.warn(
f"CUDA not available ({e}). Falling back to NumPy.", UserWarning
)
from .numpy_backend import NumpyBackend

return NumpyBackend()
else:
raise ValueError(
f"Unknown backend: '{backend}'. Valid options are: 'numpy', 'cupy', 'auto'"
)


__all__ = ["get_backend"]