|
1 | 1 | """ Load external datasets. """ |
| 2 | +import json |
2 | 3 | import tempfile |
3 | 4 | import zipfile |
| 5 | +from collections import namedtuple |
4 | 6 | from pathlib import Path |
5 | 7 | from typing import List, Optional, Tuple, Union |
6 | 8 |
|
|
11 | 13 | from brainspace.mesh.mesh_operations import combine_surfaces |
12 | 14 | from brainspace.vtk_interface.wrappers.data_object import BSPolyData |
13 | 15 | from netneurotools import datasets as nnt_datasets |
14 | | -from netneurotools.civet import read_civet |
15 | 16 | from nibabel import load as nib_load |
16 | 17 | from nibabel.freesurfer.io import read_annot, read_geometry |
| 18 | +from sklearn.utils import Bunch |
| 19 | + |
| 20 | +try: |
| 21 | + from netneurotools.datasets.utils import _get_data_dir, _get_dataset_info |
| 22 | +except ImportError: |
| 23 | + from netneurotools.datasets.datasets_utils import _get_data_dir, _get_dataset_info |
| 24 | + |
| 25 | +try: |
| 26 | + from nilearn.datasets._utils import fetch_files as _fetch_files |
| 27 | +except ImportError: |
| 28 | + from nilearn.datasets.utils import fetch_files as _fetch_files |
17 | 29 |
|
18 | 30 | from brainstat._utils import ( |
19 | 31 | _download_file, |
|
24 | 36 | from brainstat.mesh.interpolate import _surf2surf |
25 | 37 |
|
26 | 38 |
|
def read_civet(fname: Union[str, Path]) -> Tuple[np.ndarray, np.ndarray]:
    """Read a CIVET surface file (ASCII MNI ``.obj`` polygon format).

    Parameters
    ----------
    fname : str or pathlib.Path
        Path to the CIVET surface file.

    Returns
    -------
    vertices : numpy.ndarray
        (N, 3) array of vertex coordinates.
    triangles : numpy.ndarray
        (M, 3) array of triangle vertex indices.

    Raises
    ------
    NotImplementedError
        If the file is a binary MNI ``.obj`` file (first character is not
        the ASCII polygon marker ``P``).
    """
    fname = str(fname)
    with open(fname, "rb") as f:
        first_char = f.read(1)

    # ASCII polygon files start with 'P'; anything else is the binary variant.
    if first_char != b"P":
        raise NotImplementedError("Binary CIVET files are not supported yet.")

    with open(fname, "r") as f:
        content = f.read().split()

    # Header layout: 'P', five surface-property values, then the vertex count.
    n_vertices = int(content[6])
    idx = 7

    # Vertex coordinates: n_vertices triples of (x, y, z).
    vertices = np.array(content[idx : idx + n_vertices * 3], dtype=float).reshape(
        -1, 3
    )
    idx += n_vertices * 3

    # Vertex normals follow (same count as coordinates); not needed here.
    idx += n_vertices * 3

    n_triangles = int(content[idx])
    idx += 1

    colour_flag = int(content[idx])
    idx += 1

    # Skip the colour table. MNI obj colour flags: 0 = one RGBA colour for
    # the whole surface, 1 = one RGBA per item (triangle), 2 = one RGBA per
    # vertex.
    if colour_flag == 0:
        idx += 4
    elif colour_flag == 1:
        idx += n_triangles * 4
    else:
        idx += n_vertices * 4

    # An optional 'end indices' list (one entry per triangle) may precede the
    # flat vertex-index list; detect it from the remaining item count and skip.
    remaining = len(content) - idx
    if remaining == n_triangles * 3 + n_triangles:
        idx += n_triangles

    triangles = np.array(content[idx:], dtype=int).reshape(-1, 3)

    return vertices, triangles
| 96 | + |
| 97 | + |
27 | 98 | def fetch_parcellation( |
28 | 99 | template: str, |
29 | 100 | atlas: str, |
@@ -147,7 +218,7 @@ def fetch_template_surface( |
147 | 218 | surfaces_fs = [read_geometry(file) for file in surface_files] |
148 | 219 | surfaces = [build_polydata(surface[0], surface[1]) for surface in surfaces_fs] |
149 | 220 | elif template == "fslr32k": |
150 | | - surfaces = [read_surface(file) for file in surface_files] |
| 221 | + surfaces = [read_surface(str(file)) for file in surface_files] |
151 | 222 | else: |
152 | 223 | surfaces_obj = [read_civet(file) for file in surface_files] |
153 | 224 | surfaces = [build_polydata(surface[0], surface[1]) for surface in surfaces_obj] |
@@ -384,7 +455,7 @@ def _fetch_template_surface_files( |
384 | 455 |
|
385 | 456 | if template == "fslr32k": |
386 | 457 | layer = layer if layer else "midthickness" |
387 | | - bunch = nnt_datasets.fetch_conte69(data_dir=str(data_dir)) |
| 458 | + bunch = _fetch_conte69_fixed(data_dir=str(data_dir)) |
388 | 459 | elif template == "civet41k" or template == "civet164k": |
389 | 460 | layer = layer if layer else "mid" |
390 | 461 | if layer == "sphere": |
@@ -515,3 +586,43 @@ def _fetch_civet_spheres(template: str, data_dir: Path) -> Tuple[str, str]: |
515 | 586 |
|
516 | 587 | # Return two filenames to conform to other left/right hemisphere functions. |
517 | 588 | return (str(filename), str(filename)) |
| 589 | + |
| 590 | + |
| 591 | +SURFACE = namedtuple('Surface', ('lh', 'rh')) |
| 592 | + |
| 593 | + |
def _fetch_conte69_fixed(data_dir=None, url=None, resume=True, verbose=1):
    """Download the Van Essen et al., 2012 Conte69 fsLR-32k template.

    Drop-in replacement for ``netneurotools``' Conte69 fetcher that opens the
    template description JSON with an explicit UTF-8 encoding, so the fetch
    also works on Windows where the locale default encoding may fail.

    Parameters
    ----------
    data_dir : str, optional
        Directory in which to store / look up the dataset.
    url : str, optional
        Override download URL; defaults to the dataset registry entry.
    resume : bool, optional
        Whether to resume a partial download, by default True.
    verbose : int, optional
        Verbosity passed through to the file fetcher, by default 1.

    Returns
    -------
    sklearn.utils.Bunch
        Keys ``midthickness``, ``inflated``, ``vinflated`` (each a Surface
        namedtuple of left/right GIFTI file paths) and ``info`` (the parsed
        template description).
    """
    dataset_name = 'tpl-conte69'
    keys = ['midthickness', 'inflated', 'vinflated']

    data_dir = _get_data_dir(data_dir=data_dir)
    info = _get_dataset_info(dataset_name)
    download_url = info['url'] if url is None else url

    opts = {
        'uncompress': True,
        'md5sum': info['md5'],
        'move': '{}.tar.gz'.format(dataset_name),
    }

    filenames = []
    for res in keys:
        for hemi in ('L', 'R'):
            filenames.append(
                'tpl-conte69/tpl-conte69_space-MNI305_variant-fsLR32k_{}.{}.surf.gii'
                .format(res, hemi)
            )
    filenames.append('tpl-conte69/template_description.json')

    data = _fetch_files(
        data_dir,
        files=[(name, download_url, opts) for name in filenames],
        resume=resume,
        verbose=verbose,
    )

    # Parse the description JSON; explicit UTF-8 avoids Windows locale issues.
    with open(data[-1], 'r', encoding='utf-8') as src:
        description = json.load(src)

    # Pair consecutive (L, R) file paths into per-layer Surface namedtuples.
    surfaces = [SURFACE(*data[i:i + 2]) for i in range(0, 6, 2)]

    return Bunch(**dict(zip(keys + ['info'], surfaces + [description])))
0 commit comments