import json
import os
from typing import Dict, Iterator, List, Optional, Tuple

import datasets

_CITATION = """\
Please cite the PPMI and derivative providers as listed in CITATION.cff of this dataset repository.
"""

_DESCRIPTION = """\
Multi-modal derived brain network dataset (PPMI connectivity graphs) organized
in a BIDS-ish derivatives layout.

This builder exposes a tiny, fast "preview" split for interactive exploration
on huggingface.co and quick local smoke tests. The preview embeds a downsampled
correlation matrix (e.g., 8×8) for each row so the viewer can render a small
numeric table. It also includes metadata (parcellation, subject) and array
shapes. The heavy arrays remain on disk under the repository (not moved or
renamed) and can be accessed via the provided file paths.

Variable name fallbacks when reading .mat files mirror the main repository
conventions:
- timeseries: features_timeseries | timeseries | X
- correlation: correlation_matrix | corr | A

For larger slices (optional "dev" split), only metadata and file paths are
exposed to keep the viewer light.
"""

_HOMEPAGE = "https://huggingface.co/datasets/pakkinlau/multi-modal-derived-brain-network"


class MMDNConfig(datasets.BuilderConfig):
    """BuilderConfig for MMDN, pinned to dataset version 1.0.0."""

    def __init__(self, **kwargs):
        super().__init__(version=datasets.Version("1.0.0"), **kwargs)


class MMDN(datasets.GeneratorBasedBuilder):
    """Builder for the multi-modal derived brain network (MMDN) dataset.

    Exposes a light "preview" split (embedded tiny matrices) and an optional
    "dev" split (metadata and file paths only).
    """

    BUILDER_CONFIGS = [
        MMDNConfig(
            name="default",
            description="MMDN with preview (embedded tiny arrays) and optional dev metadata split",
        ),
    ]
    DEFAULT_CONFIG_NAME = "default"

    def _info(self) -> datasets.DatasetInfo:
        """Describe the schema shared by the preview and dev splits."""
        # Features include a superset so both preview (with embedded small
        # matrices) and dev (metadata-only) work.
        features = datasets.Features(
            {
                "parcellation": datasets.Value("string"),
                "subject": datasets.Value("string"),
                # Shapes as [n, n] and [n, t]
                "corr_shape": datasets.Sequence(datasets.Value("int32")),
                "ts_shape": datasets.Sequence(datasets.Value("int32")),
                # File paths (relative to repo root)
                "corr_path": datasets.Value("string"),
                "ts_path": datasets.Value("string"),
                # Tiny preview matrix (downsampled 8x8 top-left). For
                # non-preview splits this can be an empty list.
                "correlation_matrix": datasets.Sequence(datasets.Sequence(datasets.Value("float32"))),
            }
        )
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=features,
            citation=_CITATION,
            homepage=_HOMEPAGE,
        )

    def _split_generators(self, dl_manager: datasets.DownloadManager):
        """Build split generators from manifest files located next to this script.

        Raises:
            FileNotFoundError: when neither manifests/preview.jsonl nor
                manifests/dev.jsonl exists.
        """
        base_dir = os.path.abspath(os.path.dirname(__file__))

        def _maybe(path: str) -> Optional[str]:
            # Resolve relative to the script directory; None when missing.
            ap = os.path.join(base_dir, path)
            return ap if os.path.exists(ap) else None

        preview_manifest = _maybe(os.path.join("manifests", "preview.jsonl"))
        dev_manifest = _maybe(os.path.join("manifests", "dev.jsonl"))
        splits = []
        if preview_manifest:
            splits.append(
                datasets.SplitGenerator(
                    name=datasets.Split("preview"),
                    gen_kwargs={"manifest_path": preview_manifest, "embed_preview": True},
                )
            )
        if dev_manifest:
            splits.append(
                datasets.SplitGenerator(
                    name=datasets.Split("dev"),
                    gen_kwargs={"manifest_path": dev_manifest, "embed_preview": False},
                )
            )
        # If no manifest is found, raise a helpful error.
        if not splits:
            raise FileNotFoundError(
                "No manifests found. Expected manifests/preview.jsonl (and optionally manifests/dev.jsonl) in the dataset repo."
            )
        return splits

    def _generate_examples(self, manifest_path: str, embed_preview: bool) -> Iterator[Tuple[str, Dict]]:
        """Yield (key, example) pairs from a JSONL manifest.

        Each manifest line is a JSON object with keys ``parcellation``,
        ``subject``, ``corr_path`` and ``ts_path`` (paths relative to the repo
        root). When ``embed_preview`` is True, a small correlation matrix is
        embedded, preferring a precomputed preview JSON over reading the .mat.
        """
        base_dir = os.path.abspath(os.path.dirname(__file__))
        with open(manifest_path, "r", encoding="utf-8") as f:
            for idx, line in enumerate(f):
                if not line.strip():
                    continue
                row = json.loads(line)
                parcellation = row.get("parcellation")
                subject = row.get("subject")
                corr_rel = row.get("corr_path")
                ts_rel = row.get("ts_path")
                corr_path = os.path.join(base_dir, corr_rel) if corr_rel else None
                ts_path = os.path.join(base_dir, ts_rel) if ts_rel else None

                corr_shape, ts_shape = self._inspect_shapes(corr_path, ts_path)
                # Fallback: infer shapes from JSON sidecars if .mat loaders
                # are unavailable.
                if corr_shape is None and corr_path:
                    corr_shape = self._infer_corr_shape_from_sidecar(corr_path)
                if ts_shape is None and ts_path:
                    ts_shape = self._infer_ts_shape_from_sidecar(ts_path)

                preview_matrix: List[List[float]] = []
                if embed_preview:
                    # Prefer precomputed tiny preview JSON if present, else
                    # try reading from the .mat file.
                    preview_json = self._preview_json_for(parcellation, subject)
                    if preview_json and os.path.exists(preview_json):
                        try:
                            with open(preview_json, "r", encoding="utf-8") as pj:
                                arr = json.load(pj)
                                if isinstance(arr, list) and (not arr or isinstance(arr[0], list)):
                                    # Ensure plain floats (float32-compatible).
                                    # BUGFIX: inner loop variable renamed so it
                                    # no longer shadows the manifest `row`.
                                    preview_matrix = [[float(x) for x in vals] for vals in arr]
                        except Exception:
                            # Best-effort preview: fall back to empty.
                            preview_matrix = []
                    elif corr_path and os.path.exists(corr_path):
                        small = self._read_correlation_small(corr_path, size=8)
                        if small is not None:
                            preview_matrix = [[float(x) for x in vals] for vals in small.tolist()]

                example = {
                    "parcellation": parcellation,
                    "subject": subject,
                    "corr_shape": list(corr_shape) if corr_shape else [],
                    "ts_shape": list(ts_shape) if ts_shape else [],
                    "corr_path": corr_rel or "",
                    "ts_path": ts_rel or "",
                    "correlation_matrix": preview_matrix,
                }
                # Unique key: combine split index + subject + parcellation
                key = f"{idx:06d}-{parcellation}-{subject}"
                yield key, example

    # --- Helpers ---
@staticmethod def _try_import_mat_modules(): try: import scipy.io as sio # type: ignore except Exception as e: # pragma: no cover sio = None try: import mat73 # type: ignore except Exception: mat73 = None return sio, mat73 def _load_mat(self, path: str) -> Optional[Dict]: sio, mat73 = self._try_import_mat_modules() if sio is not None: try: return sio.loadmat(path, squeeze_me=True, simplify_cells=True) # type: ignore[arg-type] except NotImplementedError: pass except Exception: # Keep trying fallbacks pass if mat73 is not None: try: return mat73.loadmat(path) # type: ignore[attr-defined] except Exception: pass return None def _pick_var(self, d: Dict, candidates: List[str]) -> Optional[Tuple[str, object]]: for k in candidates: if k in d: return k, d[k] # Some loaders store keys lower/upper differently; try case-insensitive match lower_map = {k.lower(): k for k in d.keys()} for k in candidates: if k.lower() in lower_map: real_k = lower_map[k.lower()] return real_k, d[real_k] return None def _inspect_shapes(self, corr_path: Optional[str], ts_path: Optional[str]) -> Tuple[Optional[Tuple[int, int]], Optional[Tuple[int, int]]]: import numpy as np # local import to avoid hard dependency at import time corr_shape: Optional[Tuple[int, int]] = None ts_shape: Optional[Tuple[int, int]] = None if corr_path and os.path.exists(corr_path): data = self._load_mat(corr_path) if isinstance(data, dict): pick = self._pick_var(data, ["correlation_matrix", "corr", "A"]) if pick is not None: _, arr = pick try: a = np.asarray(arr) if a.ndim >= 2: corr_shape = (int(a.shape[-2]), int(a.shape[-1])) except Exception: pass if ts_path and os.path.exists(ts_path): data = self._load_mat(ts_path) if isinstance(data, dict): pick = self._pick_var(data, ["features_timeseries", "timeseries", "X"]) if pick is not None: _, arr = pick try: a = np.asarray(arr) if a.ndim >= 2: ts_shape = (int(a.shape[-2]), int(a.shape[-1])) except Exception: pass return corr_shape, ts_shape def _read_correlation_small(self, 
corr_path: str, size: int = 8): import numpy as np data = self._load_mat(corr_path) if not isinstance(data, dict): return None pick = self._pick_var(data, ["correlation_matrix", "corr", "A"]) if pick is None: return None _, arr = pick a = np.asarray(arr) if a.ndim < 2: return None n = min(size, a.shape[-1]) return a[:n, :n].astype("float32") # --- Sidecar & preview helpers --- def _infer_corr_shape_from_sidecar(self, corr_path: str) -> Optional[Tuple[int, int]]: sidecar = os.path.splitext(corr_path)[0] + ".json" if os.path.exists(sidecar): try: with open(sidecar, "r", encoding="utf-8") as f: meta = json.load(f) n = meta.get("NodeCount") if isinstance(n, int) and n > 0: return (n, n) except Exception: return None return None def _infer_ts_shape_from_sidecar(self, ts_path: str) -> Optional[Tuple[int, int]]: sidecar = os.path.splitext(ts_path)[0] + ".json" if os.path.exists(sidecar): try: with open(sidecar, "r", encoding="utf-8") as f: meta = json.load(f) n = meta.get("NodeCount") if isinstance(n, int) and n > 0: # length T unknown; return partial shape (n only) as (n, 0) isn't informative in the table return (n,) except Exception: return None return None def _preview_json_for(self, parcellation: Optional[str], subject: Optional[str]) -> Optional[str]: if not parcellation or not subject: return None base_dir = os.path.abspath(os.path.dirname(__file__)) # filename pattern: preview/____corr8x8.json return os.path.join(base_dir, "preview", f"{parcellation}__{subject}__corr8x8.json")