Coverage for nilearn/datasets/atlas.py: 10%
558 statements
1"""Downloading NeuroImaging datasets: atlas datasets."""
3import json
4import re
5import shutil
6import warnings
7from pathlib import Path
8from tempfile import mkdtemp
9from xml.etree import ElementTree
11import numpy as np
12import pandas as pd
13from nibabel import freesurfer, load
14from sklearn.utils import Bunch
16from nilearn._utils import check_niimg, fill_doc, logger, rename_parameters
17from nilearn._utils.bids import (
18 check_look_up_table,
19 generate_atlas_look_up_table,
20)
21from nilearn._utils.logger import find_stack_level
22from nilearn._utils.param_validation import check_params
23from nilearn.datasets._utils import (
24 PACKAGE_DIRECTORY,
25 fetch_files,
26 get_dataset_descr,
27 get_dataset_dir,
28)
29from nilearn.image import get_data as get_img_data
30from nilearn.image import new_img_like, reorder_img
32_TALAIRACH_LEVELS = ["hemisphere", "lobe", "gyrus", "tissue", "ba"]
35dec_to_hex_nums = pd.DataFrame(
36 {"hex": [f"{x:02x}" for x in range(256)]}, dtype=str
37)
39deprecation_message = (
40 "From release >={version}, "
41 "instead of returning several atlas image accessible "
42 "via different keys, "
43 "this fetcher will return the atlas as a dictionary "
44 "with a single atlas image, "
45 "accessible through a 'maps' key. "
46)
49def rgb_to_hex_lookup(
50 red: pd.Series, green: pd.Series, blue: pd.Series
51) -> pd.Series:
52 """Turn RGB in hex."""
53 # see https://stackoverflow.com/questions/53875880/convert-a-pandas-dataframe-of-rgb-colors-to-hex
54 # Look everything up
55 rr = dec_to_hex_nums.loc[red, "hex"]
56 gg = dec_to_hex_nums.loc[green, "hex"]
57 bb = dec_to_hex_nums.loc[blue, "hex"]
58 # Reindex
59 rr.index = red.index
60 gg.index = green.index
61 bb.index = blue.index
62 # Concatenate and return
63 return rr + gg + bb
66@fill_doc
67def fetch_atlas_difumo(
68 dimension=64,
69 resolution_mm=2,
70 data_dir=None,
71 resume=True,
72 verbose=1,
73):
74 """Fetch DiFuMo brain atlas.
76 Dictionaries of Functional Modes, or “DiFuMo”, can serve as
77 :term:`probabilistic atlases<Probabilistic atlas>` to extract
78 functional signals with different dimensionalities (64, 128,
79 256, 512, and 1024).
80 These modes are optimized to represent raw :term:`BOLD` timeseries well,
81 over a wide range of experimental conditions.
82 See :footcite:t:`Dadi2020`.
84 .. versionadded:: 0.7.1
86 Notes
87 -----
88 Direct download links from OSF:
90 - 64: https://osf.io/pqu9r/download
91 - 128: https://osf.io/wjvd5/download
92 - 256: https://osf.io/3vrct/download
93 - 512: https://osf.io/9b76y/download
94 - 1024: https://osf.io/34792/download
96 Parameters
97 ----------
98 dimension : :obj:`int`, default=64
99 Number of dimensions in the dictionary. Valid dimensions
100 available are {64, 128, 256, 512, 1024}.
102 resolution_mm : :obj:`int`, default=2mm
103 The resolution in mm of the atlas to fetch. Valid options
104 available are {2, 3}.
105 %(data_dir)s
106 %(resume)s
107 %(verbose)s
109 Returns
110 -------
111 data : :class:`sklearn.utils.Bunch`
112 Dictionary-like object; the attributes of interest are:
114 - 'maps': :obj:`str`, path to 4D nifti file containing regions
115 definition. The shape of the image is
116 ``(104, 123, 104, dimension)`` where ``dimension`` is the
117 requested dimension of the atlas.
119 - 'labels': :class:`pandas.DataFrame` containing the labels of
120 the regions.
121 The length of the label array corresponds to the
122 number of dimensions requested. ``data.labels[i]`` is the label
123 corresponding to volume ``i`` in the 'maps' image.
125 - %(description)s
127 - %(atlas_type)s
129 - %(template)s
131 References
132 ----------
133 .. footbibliography::
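Examples
--------
A minimal usage sketch (the first call downloads the data from OSF;
the returned paths depend on your local ``data_dir``):

.. code-block:: python

    from nilearn.datasets import fetch_atlas_difumo

    difumo = fetch_atlas_difumo(dimension=64, resolution_mm=2)
    print(difumo.maps)  # path to the 4D probabilistic maps
    print(difumo.labels.head())  # DataFrame with one row per component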
135 """
136 check_params(locals())
137 atlas_type = "probabilistic"
139 dic = {
140 64: "pqu9r",
141 128: "wjvd5",
142 256: "3vrct",
143 512: "9b76y",
144 1024: "34792",
145 }
146 valid_dimensions = [64, 128, 256, 512, 1024]
147 valid_resolution_mm = [2, 3]
148 if dimension not in valid_dimensions:
149 raise ValueError(
150 f"Requested dimension={dimension} is not available. "
151 f"Valid options: {valid_dimensions}"
152 )
153 if resolution_mm not in valid_resolution_mm:
154 raise ValueError(
155 f"Requested resolution_mm={resolution_mm} is not available. "
156 f"Valid options: {valid_resolution_mm}"
157 )
159 url = f"https://osf.io/{dic[dimension]}/download"
160 opts = {"uncompress": True}
162 csv_file = Path(f"{dimension}", f"labels_{dimension}_dictionary.csv")
163 if resolution_mm != 3:
164 nifti_file = Path(f"{dimension}", "2mm", "maps.nii.gz")
165 else:
166 nifti_file = Path(f"{dimension}", "3mm", "maps.nii.gz")
168 files = [
169 (csv_file, url, opts),
170 (nifti_file, url, opts),
171 ]
173 dataset_name = "difumo_atlases"
175 data_dir = get_dataset_dir(
176 dataset_name=dataset_name, data_dir=data_dir, verbose=verbose
177 )
179 # Download the zip file, first
180 files_ = fetch_files(data_dir, files, verbose=verbose, resume=resume)
181 labels = pd.read_csv(files_[0])
182 labels = labels.rename(columns={c: c.lower() for c in labels.columns})
184 # README
185 readme_files = [
186 ("README.md", "https://osf.io/4k9bf/download", {"move": "README.md"})
187 ]
188 if not (data_dir / "README.md").exists():
189 fetch_files(data_dir, readme_files, verbose=verbose, resume=resume)
191 return Atlas(
192 maps=files_[1],
193 labels=labels,
194 description=get_dataset_descr(dataset_name),
195 atlas_type=atlas_type,
196 template="MNI152NLin6Asym",
197 )
200@fill_doc
201def fetch_atlas_craddock_2012(
202 data_dir=None,
203 url=None,
204 resume=True,
205 verbose=1,
206 homogeneity=None,
207 grp_mean=True,
208):
209 """Download and return file names \
210 for the Craddock 2012 :term:`parcellation`.
212 This function returns a :term:`probabilistic atlas<Probabilistic atlas>`.
213 The provided images are in MNI152 space. All images are 4D with
214 shapes equal to ``(47, 56, 46, 43)``.
216 See :footcite:t:`CreativeCommons` for the license.
218 See :footcite:t:`Craddock2012` and :footcite:t:`nitrcClusterROI`
219 for more information on this :term:`parcellation`.
221 Parameters
222 ----------
223 %(data_dir)s
225 %(url)s
227 %(resume)s
229 %(verbose)s
231 homogeneity : :obj:`str`, default=None
232 The choice of homogeneity: 'spatial', 'temporal', or 'random'.
233 grp_mean : :obj:`bool`, default=True
234 Whether to fetch the group-mean :term:`parcellation` (``True``) or not.
237 Returns
238 -------
239 data : :class:`sklearn.utils.Bunch`
240 Dictionary-like object, keys are:
242 - ``'scorr_mean'``: :obj:`str`, path to nifti file containing
243 the group-mean :term:`parcellation`
244 when emphasizing spatial homogeneity.
246 - ``'tcorr_mean'``: :obj:`str`, path to nifti file containing
247 the group-mean parcellation when emphasizing temporal homogeneity.
249 - ``'scorr_2level'``: :obj:`str`, path to nifti file containing
250 the :term:`parcellation` obtained
251 when emphasizing spatial homogeneity.
253 - ``'tcorr_2level'``: :obj:`str`, path to nifti file containing
254 the :term:`parcellation` obtained
255 when emphasizing temporal homogeneity.
257 - ``'random'``: :obj:`str`, path to nifti file containing
258 the :term:`parcellation` obtained with random clustering.
260 - %(description)s
262 - %(atlas_type)s
264 - %(template)s
267 Warns
268 -----
269 DeprecationWarning
270 If no homogeneity value is provided, the current behavior
271 (returning multiple maps) is deprecated.
272 Starting in version 0.13, a single map will be returned under a 'maps' key,
273 selected according to the homogeneity and grp_mean values.
275 References
276 ----------
277 .. footbibliography::
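Examples
--------
A minimal usage sketch using the non-deprecated calling convention
(the first call downloads the parcellation archive):

.. code-block:: python

    from nilearn.datasets import fetch_atlas_craddock_2012

    craddock = fetch_atlas_craddock_2012(homogeneity="spatial", grp_mean=True)
    print(craddock.maps)  # path to the selected 4D parcellation image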
279 """
280 check_params(locals())
281 atlas_type = "probabilistic"
283 if url is None:
284 url = (
285 "https://cluster_roi.projects.nitrc.org"
286 "/Parcellations/craddock_2011_parcellations.tar.gz"
287 )
288 opts = {"uncompress": True}
290 dataset_name = "craddock_2012"
292 keys = (
293 "scorr_mean",
294 "tcorr_mean",
295 "scorr_2level",
296 "tcorr_2level",
297 "random",
298 )
299 filenames = [
300 ("scorr05_mean_all.nii.gz", url, opts),
301 ("tcorr05_mean_all.nii.gz", url, opts),
302 ("scorr05_2level_all.nii.gz", url, opts),
303 ("tcorr05_2level_all.nii.gz", url, opts),
304 ("random_all.nii.gz", url, opts),
305 ]
307 data_dir = get_dataset_dir(
308 dataset_name, data_dir=data_dir, verbose=verbose
309 )
311 sub_files = fetch_files(
312 data_dir, filenames, resume=resume, verbose=verbose
313 )
315 fdescr = get_dataset_descr(dataset_name)
317 if homogeneity:
318 if homogeneity in ["spatial", "temporal"]:
319 if grp_mean:
320 filename = [
321 (homogeneity[0] + "corr05_mean_all.nii.gz", url, opts)
322 ]
323 else:
324 filename = [
325 (homogeneity[0] + "corr05_2level_all.nii.gz", url, opts)
326 ]
327 else:
328 filename = [("random_all.nii.gz", url, opts)]
329 data = fetch_files(data_dir, filename, resume=resume, verbose=verbose)
331 return Atlas(
332 maps=data[0],
333 description=fdescr,
334 atlas_type=atlas_type,
335 )
337 warnings.warn(
338 category=DeprecationWarning,
339 message=(
340 deprecation_message.format(version="0.13")
341 + (
342 "To suppress this warning, "
343 "Please use the parameters 'homogeneity' and 'grp_mean' "
344 "to specify the exact atlas image you want."
345 )
346 ),
347 stacklevel=find_stack_level(),
348 )
350 params = dict(
351 [
352 ("description", fdescr),
353 *list(zip(keys, sub_files)),
354 ]
355 )
356 params["atlas_type"] = atlas_type
358 return Bunch(**params)
361@fill_doc
362def fetch_atlas_destrieux_2009(
363 lateralized=True,
364 data_dir=None,
365 url=None,
366 resume=True,
367 verbose=1,
368):
369 """Download and load the Destrieux cortical \
370 :term:`deterministic atlas<Deterministic atlas>` (dated 2009).
372 See :footcite:t:`Fischl2004`,
373 and :footcite:t:`Destrieux2009`.
375 .. note::
377 Some labels from the list of labels might not be present
378 in the atlas image,
379 in which case the integer values in the image
380 might not be consecutive.
382 Parameters
383 ----------
384 lateralized : :obj:`bool`, default=True
385 If True, returns an atlas with distinct regions for right and left
386 hemispheres.
387 %(data_dir)s
388 %(url)s
389 %(resume)s
390 %(verbose)s
392 Returns
393 -------
394 data : :class:`sklearn.utils.Bunch`
395 Dictionary-like object, contains:
397 - 'maps': :obj:`str`
398 path to nifti file containing the
399 :class:`~nibabel.nifti1.Nifti1Image` defining the cortical
400 ROIs, lateralized or not. The image has shape ``(76, 93, 76)``,
401 and contains integer values which can be interpreted as the
402 indices in the list of labels.
404 - %(labels)s
406 - %(description)s
408 - %(lut)s
410 - %(template)s
412 - %(atlas_type)s
414 References
415 ----------
416 .. footbibliography::
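Examples
--------
A minimal usage sketch (the first call downloads the atlas):

.. code-block:: python

    from nilearn.datasets import fetch_atlas_destrieux_2009

    destrieux = fetch_atlas_destrieux_2009(lateralized=True)
    print(destrieux.maps)  # path to the 3D label image
    print(destrieux.labels[:5])  # first few region names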
418 """
419 check_params(locals())
421 atlas_type = "deterministic"
423 if url is None:
424 url = "https://www.nitrc.org/frs/download.php/11942/"
426 url += "destrieux2009.tgz"
427 opts = {"uncompress": True}
428 lat = "_lateralized" if lateralized else ""
430 files = [
431 (f"destrieux2009_rois_labels{lat}.csv", url, opts),
432 (f"destrieux2009_rois{lat}.nii.gz", url, opts),
433 ("destrieux2009.rst", url, opts),
434 ]
436 dataset_name = "destrieux_2009"
437 data_dir = get_dataset_dir(
438 dataset_name, data_dir=data_dir, verbose=verbose
439 )
440 files_ = fetch_files(data_dir, files, resume=resume, verbose=verbose)
442 labels = pd.read_csv(files_[0], index_col=0)
444 return Atlas(
445 maps=files_[1],
446 labels=labels.name.to_list(),
447 description=Path(files_[2]).read_text(),
448 atlas_type=atlas_type,
449 lut=pd.read_csv(files_[0]),
450 template="fsaverage",
451 )
454@fill_doc
455def fetch_atlas_harvard_oxford(
456 atlas_name, data_dir=None, symmetric_split=False, resume=True, verbose=1
457):
458 """Load Harvard-Oxford parcellations from FSL.
460 This function downloads the Harvard-Oxford atlas packaged with FSL 5.0
461 and stores it in the NILEARN_DATA folder in your home directory.
463 This function can also load the Harvard-Oxford atlas from a local FSL
464 installation if you pass its path through the `data_dir` argument.
465 See the documentation for details.
467 .. note::
469 For atlases 'cort-prob-1mm', 'cort-prob-2mm', 'cortl-prob-1mm',
470 'cortl-prob-2mm', 'sub-prob-1mm', and 'sub-prob-2mm', the function
471 returns a :term:`Probabilistic atlas`, and the
472 :class:`~nibabel.nifti1.Nifti1Image` returned is 4D, with shape
473 ``(182, 218, 182, 48)``.
474 For :term:`deterministic atlases<Deterministic atlas>`, the
475 :class:`~nibabel.nifti1.Nifti1Image` returned is 3D, with
476 shape ``(182, 218, 182)`` and 48 regions (+ background).
478 Parameters
479 ----------
480 atlas_name : :obj:`str`
481 Name of atlas to load. Can be:
482 "cort-maxprob-thr0-1mm", "cort-maxprob-thr0-2mm",
483 "cort-maxprob-thr25-1mm", "cort-maxprob-thr25-2mm",
484 "cort-maxprob-thr50-1mm", "cort-maxprob-thr50-2mm",
485 "cort-prob-1mm", "cort-prob-2mm",
486 "cortl-maxprob-thr0-1mm", "cortl-maxprob-thr0-2mm",
487 "cortl-maxprob-thr25-1mm", "cortl-maxprob-thr25-2mm",
488 "cortl-maxprob-thr50-1mm", "cortl-maxprob-thr50-2mm",
489 "cortl-prob-1mm", "cortl-prob-2mm",
490 "sub-maxprob-thr0-1mm", "sub-maxprob-thr0-2mm",
491 "sub-maxprob-thr25-1mm", "sub-maxprob-thr25-2mm",
492 "sub-maxprob-thr50-1mm", "sub-maxprob-thr50-2mm",
493 "sub-prob-1mm", "sub-prob-2mm".
494 %(data_dir)s
495 Optionally, it can also be an FSL installation directory (which
496 depends on your installation).
497 For example, if FSL is installed in ``/usr/share/fsl/``, then
498 passing '/usr/share/' will load the Harvard-Oxford atlas
499 from your installation, since we mimic the same root directory
500 layout as FSL.
502 symmetric_split : :obj:`bool`, default=False
503 If ``True``, lateralized atlases of cort or sub with maxprob will be
504 returned. For subcortical types (``sub-maxprob``), every
505 symmetric region is split into left and right parts, effectively
506 doubling the number of regions.
508 .. note::
509 Not implemented
510 for full :term:`Probabilistic atlas` (*-prob-* atlases).
512 %(resume)s
513 %(verbose)s
515 Returns
516 -------
517 data : :class:`sklearn.utils.Bunch`
518 Dictionary-like object, keys are:
520 - 'maps': :class:`~nibabel.nifti1.Nifti1Image`
521 the atlas image
522 (its file path is returned under the 'filename' key).
523 It is a 4D image
524 if a :term:`Probabilistic atlas` is requested, and a 3D image
525 if a :term:`maximum probability atlas<Deterministic atlas>` is
526 requested.
527 In the latter case, the image contains integer
528 values which can be interpreted as the indices in the list
529 of labels.
531 .. note::
533 For some atlases, it can be the case that some regions
534 are empty. In this case, no :term:`voxels<voxel>` in the
535 map are assigned to these regions. So the number of
536 unique values in the map can be strictly smaller than the
537 number of region names in ``labels``.
539 - %(labels)s
541 - 'filename': :obj:`str`, path to the atlas nifti file; kept for backward compatibility only.
543 - %(description)s
545 - %(lut)s
546 Only for the deterministic version of the atlas.
548 - %(template)s
550 - %(atlas_type)s
552 See Also
553 --------
554 nilearn.datasets.fetch_atlas_juelich
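Examples
--------
A minimal usage sketch with one of the deterministic variants
(the first call downloads the FSL atlas archive):

.. code-block:: python

    from nilearn.datasets import fetch_atlas_harvard_oxford

    ho = fetch_atlas_harvard_oxford("cort-maxprob-thr25-2mm")
    print(ho.labels[:3])  # first region names, including 'Background'
    print(ho.maps.shape)  # 3D deterministic label image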
556 """
557 check_params(locals())
559 atlases = [
560 "cort-maxprob-thr0-1mm",
561 "cort-maxprob-thr0-2mm",
562 "cort-maxprob-thr25-1mm",
563 "cort-maxprob-thr25-2mm",
564 "cort-maxprob-thr50-1mm",
565 "cort-maxprob-thr50-2mm",
566 "cort-prob-1mm",
567 "cort-prob-2mm",
568 "cortl-maxprob-thr0-1mm",
569 "cortl-maxprob-thr0-2mm",
570 "cortl-maxprob-thr25-1mm",
571 "cortl-maxprob-thr25-2mm",
572 "cortl-maxprob-thr50-1mm",
573 "cortl-maxprob-thr50-2mm",
574 "cortl-prob-1mm",
575 "cortl-prob-2mm",
576 "sub-maxprob-thr0-1mm",
577 "sub-maxprob-thr0-2mm",
578 "sub-maxprob-thr25-1mm",
579 "sub-maxprob-thr25-2mm",
580 "sub-maxprob-thr50-1mm",
581 "sub-maxprob-thr50-2mm",
582 "sub-prob-1mm",
583 "sub-prob-2mm",
584 ]
585 if atlas_name not in atlases:
586 atlases = "\n".join(atlases)
587 raise ValueError(
588 f"Invalid atlas name: {atlas_name}. "
589 f"Please choose an atlas among:\n{atlases}"
590 )
592 atlas_type = "probabilistic" if "-prob-" in atlas_name else "deterministic"
594 if atlas_type == "probabilistic" and symmetric_split:
595 raise ValueError(
596 "Region splitting not supported for probabilistic atlases"
597 )
598 (
599 atlas_img,
600 atlas_filename,
601 names,
602 is_lateralized,
603 ) = _get_atlas_data_and_labels(
604 "HarvardOxford",
605 atlas_name,
606 symmetric_split=symmetric_split,
607 data_dir=data_dir,
608 resume=resume,
609 verbose=verbose,
610 )
612 atlas_niimg = check_niimg(atlas_img)
613 if not symmetric_split or is_lateralized:
614 return Atlas(
615 maps=atlas_niimg,
616 labels=names,
617 description=get_dataset_descr("harvard_oxford"),
618 atlas_type=atlas_type,
619 lut=generate_atlas_look_up_table(
620 "fetch_atlas_harvard_oxford", name=names
621 ),
622 filename=atlas_filename,
623 template="MNI152NLin6Asym",
624 )
626 new_atlas_data, new_names = _compute_symmetric_split(
627 "HarvardOxford", atlas_niimg, names
628 )
629 new_atlas_niimg = new_img_like(
630 atlas_niimg, new_atlas_data, atlas_niimg.affine
631 )
633 return Atlas(
634 maps=new_atlas_niimg,
635 labels=new_names,
636 description=get_dataset_descr("harvard_oxford"),
637 atlas_type=atlas_type,
638 lut=generate_atlas_look_up_table(
639 "fetch_atlas_harvard_oxford", name=new_names
640 ),
641 filename=atlas_filename,
642 template="MNI152NLin6Asym",
643 )
646@fill_doc
647def fetch_atlas_juelich(
648 atlas_name, data_dir=None, symmetric_split=False, resume=True, verbose=1
649):
650 """Load Juelich parcellations from FSL.
652 This function downloads the Juelich atlas packaged with FSL 5.0
653 and stores it in the NILEARN_DATA folder in your home directory.
655 This function can also load the Juelich atlas from a local FSL
656 installation if you pass its path through the `data_dir` argument.
657 See the documentation for details.
659 .. versionadded:: 0.8.1
661 .. note::
663 For atlases 'prob-1mm', and 'prob-2mm', the function returns a
664 :term:`Probabilistic atlas`, and the
665 :class:`~nibabel.nifti1.Nifti1Image` returned is 4D, with shape
666 ``(182, 218, 182, 62)``.
667 For :term:`deterministic atlases<Deterministic atlas>`, the
668 :class:`~nibabel.nifti1.Nifti1Image` returned is 3D, with shape
669 ``(182, 218, 182)`` and 62 regions (+ background).
671 Parameters
672 ----------
673 atlas_name : :obj:`str`
674 Name of atlas to load. Can be:
675 "maxprob-thr0-1mm", "maxprob-thr0-2mm",
676 "maxprob-thr25-1mm", "maxprob-thr25-2mm",
677 "maxprob-thr50-1mm", "maxprob-thr50-2mm",
678 "prob-1mm", "prob-2mm".
679 %(data_dir)s
680 Optionally, it can also be an FSL installation directory (which
681 depends on your installation).
682 For example, if FSL is installed in ``/usr/share/fsl/``, then
683 passing '/usr/share/' will load the Juelich atlas
684 from your installation, since we mimic the same root directory
685 layout as FSL.
687 symmetric_split : :obj:`bool`, default=False
688 If ``True``, a lateralized version of the maxprob atlas will be
689 returned, in which every
690 symmetric region is split into left and right parts, effectively
691 doubling the number of regions.
693 .. note::
694 Not implemented for full :term:`Probabilistic atlas`
695 (``*-prob-*`` atlases).
697 %(resume)s
698 %(verbose)s
700 Returns
701 -------
702 data : :class:`sklearn.utils.Bunch`
703 Dictionary-like object, keys are:
705 - 'maps': :class:`~nibabel.nifti1.Nifti1Image`.
706 It is a 4D image if a :term:`Probabilistic atlas` is requested,
707 and a 3D image
708 if a :term:`maximum probability atlas<Deterministic atlas>`
709 is requested.
710 In the latter case, the image contains integer values
711 which can be interpreted as the indices in the list of labels.
713 .. note::
715 For some atlases, it can be the case that some regions
716 are empty. In this case, no :term:`voxels<voxel>` in the
717 map are assigned to these regions. So the number of
718 unique values in the map can be strictly smaller than the
719 number of region names in ``labels``.
721 - %(labels)s
723 - 'filename': :obj:`str`, path to the atlas nifti file; kept for backward compatibility only.
725 - %(description)s
727 - %(lut)s
728 Only for the deterministic version of the atlas.
730 - %(template)s
732 - %(atlas_type)s
734 See Also
735 --------
736 nilearn.datasets.fetch_atlas_harvard_oxford
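Examples
--------
A minimal usage sketch (the first call downloads the FSL Juelich archive):

.. code-block:: python

    from nilearn.datasets import fetch_atlas_juelich

    juelich = fetch_atlas_juelich("maxprob-thr25-2mm")
    print(len(juelich.labels))  # number of region names, incl. 'Background'
    print(juelich.maps.shape)  # 3D deterministic label image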
738 """
739 check_params(locals())
741 atlases = [
742 "maxprob-thr0-1mm",
743 "maxprob-thr0-2mm",
744 "maxprob-thr25-1mm",
745 "maxprob-thr25-2mm",
746 "maxprob-thr50-1mm",
747 "maxprob-thr50-2mm",
748 "prob-1mm",
749 "prob-2mm",
750 ]
751 if atlas_name not in atlases:
752 atlases = "\n".join(atlases)
753 raise ValueError(
754 f"Invalid atlas name: {atlas_name}. "
755 f"Please choose an atlas among:\n{atlases}"
756 )
758 atlas_type = (
759 "probabilistic" if atlas_name.startswith("prob-") else "deterministic"
760 )
762 if atlas_type == "probabilistic" and symmetric_split:
763 raise ValueError(
764 "Region splitting not supported for probabilistic atlases"
765 )
766 atlas_img, atlas_filename, names, _ = _get_atlas_data_and_labels(
767 "Juelich",
768 atlas_name,
769 data_dir=data_dir,
770 resume=resume,
771 verbose=verbose,
772 )
773 atlas_niimg = check_niimg(atlas_img)
774 atlas_data = get_img_data(atlas_niimg)
776 if atlas_type == "probabilistic":
777 new_atlas_data, new_names = _merge_probabilistic_maps_juelich(
778 atlas_data, names
779 )
780 elif symmetric_split:
781 new_atlas_data, new_names = _compute_symmetric_split(
782 "Juelich", atlas_niimg, names
783 )
784 else:
785 new_atlas_data, new_names = _merge_labels_juelich(atlas_data, names)
787 new_atlas_niimg = new_img_like(
788 atlas_niimg, new_atlas_data, atlas_niimg.affine
789 )
791 return Atlas(
792 maps=new_atlas_niimg,
793 labels=list(new_names),
794 description=get_dataset_descr("juelich"),
795 atlas_type=atlas_type,
796 lut=generate_atlas_look_up_table(
797 "fetch_atlas_juelich", name=list(new_names)
798 ),
799 filename=atlas_filename,
800 )
803def _get_atlas_data_and_labels(
804 atlas_source,
805 atlas_name,
806 symmetric_split=False,
807 data_dir=None,
808 resume=True,
809 verbose=1,
810):
811 """Implement fetching logic common to \
812 both fetch_atlas_juelich and fetch_atlas_harvard_oxford.
814 This function downloads the atlas image and labels.
815 """
816 if atlas_source == "Juelich":
817 url = "https://www.nitrc.org/frs/download.php/12096/Juelich.tgz"
818 elif atlas_source == "HarvardOxford":
819 url = "https://www.nitrc.org/frs/download.php/9902/HarvardOxford.tgz"
820 else:
821 raise ValueError(f"Atlas source {atlas_source} is not valid.")
822 # For practical reasons, we mimic the FSL data directory here.
823 data_dir = get_dataset_dir("fsl", data_dir=data_dir, verbose=verbose)
824 opts = {"uncompress": True}
825 root = Path("data", "atlases")
827 if atlas_source == "HarvardOxford":
828 if symmetric_split:
829 atlas_name = atlas_name.replace("cort-max", "cortl-max")
831 if atlas_name.startswith("sub-"):
832 label_file = "HarvardOxford-Subcortical.xml"
833 is_lateralized = False
834 elif atlas_name.startswith("cortl"):
835 label_file = "HarvardOxford-Cortical-Lateralized.xml"
836 is_lateralized = True
837 else:
838 label_file = "HarvardOxford-Cortical.xml"
839 is_lateralized = False
840 else:
841 label_file = "Juelich.xml"
842 is_lateralized = False
843 label_file = root / label_file
844 atlas_file = root / atlas_source / f"{atlas_source}-{atlas_name}.nii.gz"
845 atlas_file, label_file = fetch_files(
846 data_dir,
847 [(atlas_file, url, opts), (label_file, url, opts)],
848 resume=resume,
849 verbose=verbose,
850 )
851 # Reorder image to have positive affine diagonal
852 atlas_img = reorder_img(atlas_file, copy_header=True)
853 names = {0: "Background"}
855 all_labels = ElementTree.parse(label_file).findall(".//label")
856 for label in all_labels:
857 new_idx = int(label.get("index")) + 1
858 if new_idx in names:
859 raise ValueError(
860 f"Duplicate index {new_idx} for labels "
861 f"'{names[new_idx]}', and '{label.text}'"
862 )
864 # fix typos in Harvard Oxford labels
865 if atlas_source == "HarvardOxford":
866 label.text = label.text.replace("Ventrical", "Ventricle")
867 label.text = label.text.replace("Operculum", "Opercular")
869 names[new_idx] = label.text.strip()
871 # The label indices should be consecutive integers from 0 to n_labels
872 assert list(names.keys()) == list(range(len(all_labels) + 1))
873 names = [item[1] for item in sorted(names.items())]
874 return atlas_img, atlas_file, names, is_lateralized
877def _merge_probabilistic_maps_juelich(atlas_data, names):
878 """Handle probabilistic juelich atlases when symmetric_split=False.
880 Helper function for fetch_atlas_juelich.
882 In this situation, we need to merge labels and maps corresponding
883 to left and right regions.
884 """
885 new_names = np.unique([re.sub(r" (L|R)$", "", name) for name in names])
886 new_name_to_idx = {k: v - 1 for v, k in enumerate(new_names)}
887 new_atlas_data = np.zeros((*atlas_data.shape[:3], len(new_names) - 1))
888 for i, name in enumerate(names):
889 if name != "Background":
890 new_name = re.sub(r" (L|R)$", "", name)
891 new_atlas_data[..., new_name_to_idx[new_name]] += atlas_data[
892 ..., i - 1
893 ]
894 return new_atlas_data, new_names
897def _merge_labels_juelich(atlas_data, names):
898 """Handle 3D atlases when symmetric_split=False.
900 Helper function for fetch_atlas_juelich.
902 In this case, we need to merge the labels corresponding to
903 left and right regions.
904 """
905 new_names = np.unique([re.sub(r" (L|R)$", "", name) for name in names])
906 new_names_dict = {k: v for v, k in enumerate(new_names)}
907 new_atlas_data = atlas_data.copy()
908 for label, name in enumerate(names):
909 new_name = re.sub(r" (L|R)$", "", name)
910 new_atlas_data[atlas_data == label] = new_names_dict[new_name]
911 return new_atlas_data, new_names
914def _compute_symmetric_split(source, atlas_niimg, names):
915 """Handle 3D atlases when symmetric_split=True.
917 Helper function for both fetch_atlas_juelich and
918 fetch_atlas_harvard_oxford.
919 """
920 # The atlas_niimg should have been passed to
921 # reorder_img such that the affine's diagonal
922 # should be positive. This is important to
923 # correctly split left and right hemispheres.
924 assert atlas_niimg.affine[0, 0] > 0
925 atlas_data = get_img_data(atlas_niimg)
926 labels = np.unique(atlas_data)
927 # Build a mask of both halves of the brain
928 middle_ind = (atlas_data.shape[0]) // 2
929 # Split every zone crossing the median plane into two parts.
930 left_atlas = atlas_data.copy()
931 left_atlas[middle_ind:] = 0
932 right_atlas = atlas_data.copy()
933 right_atlas[:middle_ind] = 0
935 if source == "Juelich":
936 for idx, name in enumerate(names):
937 if name.endswith("L"):
938 name = re.sub(r" L$", "", name)
939 names[idx] = f"Left {name}"
940 if name.endswith("R"):
941 name = re.sub(r" R$", "", name)
942 names[idx] = f"Right {name}"
944 new_label = 0
945 new_atlas = atlas_data.copy()
946 # Assumes that the background label is zero.
947 new_names = [names[0]]
948 for label, name in zip(labels[1:], names[1:]):
949 new_label += 1
950 left_elements = (left_atlas == label).sum()
951 right_elements = (right_atlas == label).sum()
952 n_elements = float(left_elements + right_elements)
953 if (
954 left_elements / n_elements < 0.05
955 or right_elements / n_elements < 0.05
956 ):
957 new_atlas[atlas_data == label] = new_label
958 new_names.append(name)
959 continue
960 new_atlas[left_atlas == label] = new_label
961 new_names.append(f"Left {name}")
962 new_label += 1
963 new_atlas[right_atlas == label] = new_label
964 new_names.append(f"Right {name}")
965 return new_atlas, new_names
968@fill_doc
969def fetch_atlas_msdl(data_dir=None, url=None, resume=True, verbose=1):
970 """Download and load the MSDL brain :term:`Probabilistic atlas`.
972 It can be downloaded at :footcite:t:`atlas_msdl`, and cited
973 using :footcite:t:`Varoquaux2011`.
974 See also :footcite:t:`Varoquaux2013` for more information.
976 Parameters
977 ----------
978 %(data_dir)s
979 %(url)s
980 %(resume)s
981 %(verbose)s
983 Returns
984 -------
985 data : :class:`sklearn.utils.Bunch`
986 Dictionary-like object; the attributes of interest are:
988 - 'maps': :obj:`str`
989 path to nifti file containing the
990 :term:`Probabilistic atlas` image
991 (shape is equal to ``(40, 48, 35, 39)``).
993 - %(labels)s
994 There are 39 labels such that ``data.labels[i]``
995 corresponds to map ``i``.
997 - 'region_coords': :obj:`list` of length-3 :obj:`tuple`
998 ``data.region_coords[i]`` contains the coordinates ``(x, y, z)``
999 of region ``i`` in :term:`MNI` space.
1001 - 'networks': :obj:`list` of :obj:`str`
1002 list containing the names of the networks.
1003 There are 39 network names such that
1004 ``data.networks[i]`` is the network name of region ``i``.
1006 - %(description)s
1008 - %(atlas_type)s
1010 - %(template)s
1012 References
1013 ----------
1014 .. footbibliography::
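Examples
--------
A minimal usage sketch (the first call downloads the atlas):

.. code-block:: python

    from nilearn.datasets import fetch_atlas_msdl

    msdl = fetch_atlas_msdl()
    print(msdl.maps)  # path to the 4D probabilistic maps
    print(msdl.labels[:3], msdl.networks[:3])
    print(msdl.region_coords[0])  # [x, y, z] of the first region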
1017 """
1018 check_params(locals())
1020 atlas_type = "probabilistic"
1022 url = "https://team.inria.fr/parietal/files/2015/01/MSDL_rois.zip"
1023 opts = {"uncompress": True}
1025 dataset_name = "msdl_atlas"
1026 files = [
1027 (Path("MSDL_rois", "msdl_rois_labels.csv"), url, opts),
1028 (Path("MSDL_rois", "msdl_rois.nii"), url, opts),
1029 ]
1031 data_dir = get_dataset_dir(
1032 dataset_name, data_dir=data_dir, verbose=verbose
1033 )
1034 files = fetch_files(data_dir, files, resume=resume, verbose=verbose)
1036 csv_data = pd.read_csv(files[0])
1037 net_names = [
1038 net_name.strip() for net_name in csv_data["net name"].to_list()
1039 ]
1041 return Atlas(
1042 maps=files[1],
1043 labels=[name.strip() for name in csv_data["name"].to_list()],
1044 description=get_dataset_descr(dataset_name),
1045 atlas_type=atlas_type,
1046 region_coords=csv_data[["x", "y", "z"]].to_numpy().tolist(),
1047 networks=net_names,
1048 )
1051@fill_doc
1052def fetch_coords_power_2011():
1053 """Download and load the Power et al. brain atlas composed of 264 ROIs.
1055 See :footcite:t:`Power2011`.
1057 Returns
1058 -------
1059 data : :class:`sklearn.utils.Bunch`
1060 Dictionary-like object, contains:
1062 - 'rois': :class:`pandas.DataFrame`
1063 Contains the coordinates of 264 ROIs in :term:`MNI` space.
1065 - %(description)s
1068 References
1069 ----------
1070 .. footbibliography::
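Examples
--------
A minimal usage sketch (the coordinates ship with nilearn, so no download
is needed; the ``x``/``y``/``z`` column names assume the lowercased header
produced by this fetcher):

.. code-block:: python

    from nilearn.datasets import fetch_coords_power_2011

    power = fetch_coords_power_2011()
    coords = power.rois[["x", "y", "z"]].to_numpy()
    print(coords.shape)  # (264, 3)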
1072 """
1073 dataset_name = "power_2011"
1074 fdescr = get_dataset_descr(dataset_name)
1075 csv = PACKAGE_DIRECTORY / "data" / "power_2011.csv"
1076 params = {"rois": pd.read_csv(csv), "description": fdescr}
1077 params["rois"] = params["rois"].rename(
1078 columns={c: c.lower() for c in params["rois"].columns}
1079 )
1081 return Bunch(**params)
1084@fill_doc
1085def fetch_atlas_smith_2009(
1086 data_dir=None,
1087 url=None,
1088 resume=True,
1089 verbose=1,
1090 mirror="origin",
1091 dimension=None,
1092 resting=True,
1093):
1094 """Download and load the Smith :term:`ICA` and BrainMap \
1095 :term:`Probabilistic atlas` (2009).
1097 See :footcite:t:`Smith2009b` and :footcite:t:`Laird2011`.
1099 Parameters
1100 ----------
1101 %(data_dir)s
1103 %(url)s
1105 %(resume)s
1107 %(verbose)s
1109 mirror : :obj:`str`, default='origin'
1110 By default, the dataset is downloaded from the original website of the
1111 atlas. Specifying "nitrc" will force download from a mirror, with
1112 potentially higher bandwidth.
1114 dimension : :obj:`int`, default=None
1115 Number of dimensions in the dictionary. Valid dimensions
1116 available are {10, 20, 70}.
1118 resting : :obj:`bool`, default=True
1119 Whether to fetch the resting-:term:`fMRI` or the BrainMap components.
1121 Returns
1122 -------
1123 data : :class:`sklearn.utils.Bunch`
1124 Dictionary-like object, contains:
1126 - ``'rsn20'``: :obj:`str`
1127 Path to nifti file containing
1128 the 20-dimensional :term:`ICA`, resting-:term:`fMRI` components.
1129 The shape of the image is ``(91, 109, 91, 20)``.
1131 - ``'rsn10'``: :obj:`str`
1132 Path to nifti file containing
1133 the 10 well-matched maps from the 20 maps obtained as for 'rsn20',
1134 as shown in :footcite:t:`Smith2009b`.
1135 The shape of the image is ``(91, 109, 91, 10)``.
1137 - ``'bm20'``: :obj:`str`
1138 Path to nifti file containing
1139 the 20-dimensional :term:`ICA`, BrainMap components.
1140 The shape of the image is ``(91, 109, 91, 20)``.
1142 - ``'bm10'``: :obj:`str`
1143 Path to nifti file containing
1144 the 10 well-matched maps from the 20 maps obtained as for 'bm20',
1145 as shown in :footcite:t:`Smith2009b`.
1146 The shape of the image is ``(91, 109, 91, 10)``.
1148 - ``'rsn70'``: :obj:`str`
1149 Path to nifti file containing
1150 the 70-dimensional :term:`ICA`, resting-:term:`fMRI` components.
1151 The shape of the image is ``(91, 109, 91, 70)``.
1153 - ``'bm70'``: :obj:`str`
1154 Path to nifti file containing
1155 the 70-dimensional :term:`ICA`, BrainMap components.
1156 The shape of the image is ``(91, 109, 91, 70)``.
1158 - %(description)s
1160 - %(atlas_type)s
1162 - %(template)s
1164 Warns
1165 -----
1166 DeprecationWarning
1167 If no dimension value is provided, the current behavior
1168 (returning multiple maps) is deprecated.
1169 Starting in version 0.13, a single map will be returned under a 'maps' key,
1170 selected according to the dimension and resting values.
1172 References
1173 ----------
1174 .. footbibliography::
1176 Notes
1177 -----
1178 For more information about this dataset's structure:
1179 https://www.fmrib.ox.ac.uk/datasets/brainmap+rsns/
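Examples
--------
A minimal usage sketch using the non-deprecated calling convention
(the first call downloads the requested map):

.. code-block:: python

    from nilearn.datasets import fetch_atlas_smith_2009

    smith = fetch_atlas_smith_2009(dimension=20, resting=True)
    print(smith.maps)  # path to the 20-component resting-state ICA maps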
1181 """
1182 check_params(locals())
1184 atlas_type = "probabilistic"
1186 if url is None:
1187 if mirror == "origin":
1188 url = "https://www.fmrib.ox.ac.uk/datasets/brainmap+rsns/"
1189 elif mirror == "nitrc":
1190 url = [
1191 "https://www.nitrc.org/frs/download.php/7730/",
1192 "https://www.nitrc.org/frs/download.php/7729/",
1193 "https://www.nitrc.org/frs/download.php/7731/",
1194 "https://www.nitrc.org/frs/download.php/7726/",
1195 "https://www.nitrc.org/frs/download.php/7728/",
1196 "https://www.nitrc.org/frs/download.php/7727/",
1197 ]
1198 else:
1199 raise ValueError(
1200 f'Unknown mirror "{mirror!s}". '
1201 'Mirror must be "origin" or "nitrc"'
1202 )
1204 files = {
1205 "rsn20": "rsn20.nii.gz",
1206 "rsn10": "PNAS_Smith09_rsn10.nii.gz",
1207 "rsn70": "rsn70.nii.gz",
1208 "bm20": "bm20.nii.gz",
1209 "bm10": "PNAS_Smith09_bm10.nii.gz",
1210 "bm70": "bm70.nii.gz",
1211 }
1213 if isinstance(url, str):
1214 url = [url] * len(files)
1216 dataset_name = "smith_2009"
1217 data_dir = get_dataset_dir(
1218 dataset_name, data_dir=data_dir, verbose=verbose
1219 )
1221 fdescr = get_dataset_descr(dataset_name)
1223 if dimension:
1224 key = f"{'rsn' if resting else 'bm'}{dimension}"
1225 key_index = list(files).index(key)
1227 file = [(files[key], url[key_index] + files[key], {})]
1228 data = fetch_files(data_dir, file, resume=resume, verbose=verbose)
1230 return Atlas(
1231 maps=data[0],
1232 description=fdescr,
1233 atlas_type=atlas_type,
1234 )
1236 warnings.warn(
1237 category=DeprecationWarning,
1238 message=(
1239 deprecation_message.format(version="0.13")
1240 + (
1241 "To suppress this warning, "
1242 "Please use the parameters 'dimension' and 'resting' "
1243 "to specify the exact atlas image you want."
1244 )
1245 ),
1246 stacklevel=find_stack_level(),
1247 )
1249 keys = list(files.keys())
1250 files = [(f, u + f, {}) for f, u in zip(files.values(), url)]
1251 files_ = fetch_files(data_dir, files, resume=resume, verbose=verbose)
1252 params = dict(zip(keys, files_))
1254 params["description"] = fdescr
1255 params["atlas_type"] = atlas_type
1257 return Bunch(**params)
1260@fill_doc
1261def fetch_atlas_yeo_2011(
1262 data_dir=None,
1263 url=None,
1264 resume=True,
1265 verbose=1,
1266 n_networks=None,
1267 thickness=None,
1268):
1269 """Download and return file names for the Yeo 2011 :term:`parcellation`.
1271 This function retrieves the so-called Yeo
1272 :term:`deterministic atlases<Deterministic atlas>`. The provided images
1273 are in MNI152 space and have shapes equal to ``(256, 256, 256, 1)``.
1274 They contain consecutive integer values from 0 (background) to either
1275 7 or 17 depending on the atlas version considered.
1277 For more information on this dataset's structure,
1278 see :footcite:t:`CorticalParcellation_Yeo2011`,
1279 and :footcite:t:`Yeo2011`.
1281 Parameters
1282 ----------
1283 %(data_dir)s
1284 %(url)s
1285 %(resume)s
1286 %(verbose)s
1288 n_networks : {7, 17, None}, default = None
1289 If not None,
1290 then only a specific version of the atlas is returned:
1292 - 7 networks parcellation,
1293 - 17 networks parcellation.
1295 If ``thickness`` is not None, this will default to ``7``.
1296 The default will be set to ``7`` in version 0.13.2.
1298 .. versionadded:: 0.11.2dev
1300 thickness : {"thin", "thick", None}, default = None
1301 If not None,
1302 then only a specific version of the atlas is returned:
1304 - ``"thick"``: parcellation fitted to thick cortex segmentations,
1305 - ``"thin"``: parcellation fitted to thin cortex segmentations.
1307 If ``n_networks`` is not None, this will default to ``"thick"``.
1308 The default will be set to ``"thick"`` in version 0.13.2.
1310 .. versionadded:: 0.11.2dev
1312 Returns
1313 -------
1314 data : :class:`sklearn.utils.Bunch`
1315 Dictionary-like object.
1317 If ``n_networks`` and ``thickness`` are None, keys are:
1319 - 'thin_7': :obj:`str`
1320 Path to nifti file containing the
1321 7 networks :term:`parcellation` fitted to thin template cortex
1322 segmentations.
1323 The image contains integer values which can be
1324 interpreted as the indices in ``colors_7``.
1326 - 'thick_7': :obj:`str`
1327 Path to nifti file containing the
1328 7 networks :term:`parcellation` fitted to thick template cortex
1329 segmentations.
1330 The image contains integer values which can be
1331 interpreted as the indices in ``colors_7``.
1333 - 'thin_17': :obj:`str`
1334 Path to nifti file containing the
1335 17 networks :term:`parcellation` fitted to thin template cortex
1336 segmentations.
1337 The image contains integer values which can be
1338 interpreted as the indices in ``colors_17``.
1340 - 'thick_17': :obj:`str`
1341 Path to nifti file containing the
1342 17 networks :term:`parcellation` fitted to thick template cortex
1343 segmentations.
1344 The image contains integer values which can be
1345 interpreted as the indices in ``colors_17``.
1347 - 'colors_7': :obj:`str`
1348 Path to colormaps text file for
1349 7 networks :term:`parcellation`.
1350 This file maps :term:`voxel` integer
1351 values from ``data.thin_7`` and ``data.thick_7`` to network names.
1353 - 'colors_17': :obj:`str`
1354 Path to colormaps text file for
1355 17 networks :term:`parcellation`.
1356 This file maps :term:`voxel` integer
1357 values from ``data.thin_17`` and ``data.thick_17``
1358 to network names.
1360 - 'anat': :obj:`str`
1361 Path to nifti file containing the anatomy image.
1363 - %(description)s
1365 - %(template)s
1367 - %(atlas_type)s
1369 otherwise the keys are:
1371 - 'anat': :obj:`str`
1372 Path to nifti file containing the anatomy image.
1374 - 'maps': :obj:`str`, path to the selected 3D parcellation image.
1375 The image contains integer values for each network.
1377 - %(labels)s
1379 - %(lut)s
1381 - %(description)s
1383 - %(template)s
1385 - %(atlas_type)s
1387 References
1388 ----------
1389 .. footbibliography::
1391 Notes
1392 -----
1393 License: unknown.
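Examples
--------
A minimal usage sketch using the non-deprecated calling convention
(the first call downloads the archive):

.. code-block:: python

    from nilearn.datasets import fetch_atlas_yeo_2011

    yeo = fetch_atlas_yeo_2011(n_networks=7, thickness="thick")
    print(yeo.maps)  # path to the 7-network parcellation
    print(yeo.labels)  # network names, including 'Background'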
1395 """
1396 check_params(locals())
1398 atlas_type = "deterministic"
1400 if n_networks is None and thickness is None:
1401 warnings.warn(
1402 category=DeprecationWarning,
1403 message=(
1404 deprecation_message.format(version="0.13.2")
1405 + (
1406 "To suppress this warning, "
1407 "Please use the parameters 'n_networks' and 'thickness' "
1408 "to specify the exact atlas image you want."
1409 )
1410 ),
1411 stacklevel=find_stack_level(),
1412 )
1414 if n_networks is not None:
1415 if n_networks not in (7, 17):
1416 raise ValueError(
1417 f"'n_networks' must be 7 or 17. Got {n_networks=}"
1418 )
1419 if thickness is None:
1420 thickness = "thick"
1421 if thickness is not None:
1422 if thickness not in ("thin", "thick"):
1423 raise ValueError(
1424 f"'thickness' must be 'thin' or 'thick'. Got {thickness=}"
1425 )
1426 if n_networks is None:
1427 n_networks = 7
1429 if url is None:
1430 url = (
1431 "ftp://surfer.nmr.mgh.harvard.edu/pub/data/"
1432 "Yeo_JNeurophysiol11_MNI152.zip"
1433 )
1434 opts = {"uncompress": True}
1436 dataset_name = "yeo_2011"
1437 keys = (
1438 "thin_7",
1439 "thick_7",
1440 "thin_17",
1441 "thick_17",
1442 "colors_7",
1443 "colors_17",
1444 "anat",
1445 )
1446 basenames = (
1447 "Yeo2011_7Networks_MNI152_FreeSurferConformed1mm.nii.gz",
1448 "Yeo2011_7Networks_MNI152_FreeSurferConformed1mm_LiberalMask.nii.gz",
1449 "Yeo2011_17Networks_MNI152_FreeSurferConformed1mm.nii.gz",
1450 "Yeo2011_17Networks_MNI152_FreeSurferConformed1mm_LiberalMask.nii.gz",
1451 "Yeo2011_7Networks_ColorLUT.txt",
1452 "Yeo2011_17Networks_ColorLUT.txt",
1453 "FSL_MNI152_FreeSurferConformed_1mm.nii.gz",
1454 )
1456 filenames = [
1457 (Path("Yeo_JNeurophysiol11_MNI152", f), url, opts) for f in basenames
1458 ]
1460 data_dir = get_dataset_dir(
1461 dataset_name, data_dir=data_dir, verbose=verbose
1462 )
1463 sub_files = fetch_files(
1464 data_dir, filenames, resume=resume, verbose=verbose
1465 )
1467 fdescr = get_dataset_descr(dataset_name)
1469 params = dict(
1470 [
1471 ("description", fdescr),
1472 ("atlas_type", atlas_type),
1473 *list(zip(keys, sub_files)),
1474 ]
1475 )
1477 if n_networks and thickness:
1478 lut_file = (
1479 params["colors_7"] if n_networks == 7 else params["colors_17"]
1480 )
1481 lut = pd.read_csv(
1482 lut_file,
1483 sep="\\s+",
1484 names=["index", "name", "r", "g", "b", "fs"],
1485 header=0,
1486 )
1487 lut = _update_lut_freesurder(lut)
1489 maps = params[f"{thickness}_{n_networks}"]
1491 return Atlas(
1492 maps=maps,
1493 labels=lut.name.to_list(),
1494 description=fdescr,
1495 template="MNI152NLin6Asym",
1496 lut=lut,
1497 atlas_type=atlas_type,
1498 anat=params["anat"],
1499 )
1501 return Bunch(**params)
1504def _update_lut_freesurder(lut):
1505 """Update LUT formatted for Freesurfer."""
1506 lut = pd.concat(
1507 [
1508 pd.DataFrame([[0, "Background", 0, 0, 0, 0]], columns=lut.columns),
1509 lut,
1510 ],
1511 ignore_index=True,
1512 )
1513 lut["color"] = "#" + rgb_to_hex_lookup(lut.r, lut.g, lut.b).astype(str)
1514 lut = lut.drop(["r", "g", "b", "fs"], axis=1)
1515 return lut
1518@fill_doc
1519def fetch_atlas_aal(
1520 version="SPM12", data_dir=None, url=None, resume=True, verbose=1
1521):
1522 """Download and returns the AAL template for :term:`SPM` 12.
1524 This :term:`Deterministic atlas` is the result of an automated anatomical
1525 parcellation of the spatially normalized single-subject high-resolution
1526 T1 volume provided by the Montreal Neurological Institute (:term:`MNI`)
1527 (D. L. Collins et al., 1998, Trans. Med. Imag. 17, 463-468, PubMed).
1529 For more information on this dataset's structure,
1530 see :footcite:t:`AAL_atlas`,
1531 and :footcite:t:`Tzourio-Mazoyer2002`.
1533 .. warning::
1535 The integers in the map image (data.maps) that define the parcellation
1536 are not always consecutive, as is usually the case in Nilearn, and
1537 should not be interpreted as indices for the list of label names.
1538 In addition, the region IDs are provided as strings, so it is necessary
1539 to cast them to integers when indexing.
1540 For more information, refer to the fetcher's description:
1542 .. code-block:: python
1544 from nilearn.datasets import fetch_atlas_aal
1546 atlas = fetch_atlas_aal()
1547 print(atlas.description)
1549 Parameters
1550 ----------
1551 version : {'3v2', 'SPM12', 'SPM5', 'SPM8'}, default='SPM12'
1552 The version of the AAL atlas. Must be 'SPM5', 'SPM8', 'SPM12', or '3v2'
1553 for the latest SPM12 version of AAL3 software.
1554 %(data_dir)s
1555 %(url)s
1556 %(resume)s
1557 %(verbose)s
1559 Returns
1560 -------
1561 data : :class:`sklearn.utils.Bunch`
1562 Dictionary-like object, keys are:
1564 - 'maps': :obj:`str`
1565 Path to nifti file containing the regions.
1566 The image has shape ``(91, 109, 91)`` and contains
1567 117 unique integer values defining the parcellation in version
1568 SPM 5, 8 and 12, and 167 unique integer values defining the
1569 parcellation in version 3v2. Please refer to the main description
1570 to see how to link labels to regions IDs.
1572 - %(labels)s
1573 There are 117 names in version SPM 5, 8, and 12,
1574 and 167 names in version 3v2.
1575 Please refer to the main description
1576 to see how to link labels to regions IDs.
1578 - 'indices': :obj:`list` of :obj:`str`
1579 Indices mapping 'labels'
1580 to values in the 'maps' image.
1581 This list has 117 elements in
1582 version SPM 5, 8 and 12, and 167 elements in version 3v2.
1583 Since the values in the 'maps' image do not correspond to
1584 indices in ``labels``, but rather to values in ``indices``, the
1585 location of a label in the ``labels`` list does not necessary
1586 match the associated value in the image.
1587 Use the ``indices``
1588 list to identify the appropriate image value for a given label
1589 (See main description above).
1591 - %(description)s
1593 - %(lut)s
1595 - %(template)s
1597 - %(atlas_type)s
1600 Warns
1601 -----
1602 DeprecationWarning
1603 Starting in version 0.13, the default fetched mask will be AAL 3v2.
1605 References
1606 ----------
1607 .. footbibliography::
1609 Notes
1610 -----
1611 License: unknown.
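Examples
--------
A minimal sketch of how to map a label to its value in the image
(``'Precentral_L'`` is only an illustrative label name; the first call
downloads the atlas):

.. code-block:: python

    from nilearn.datasets import fetch_atlas_aal

    aal = fetch_atlas_aal(version="SPM12")
    # Image values come from 'indices', not from the position
    # of the label in 'labels'.
    value = aal.indices[aal.labels.index("Precentral_L")]
    print(value)  # string ID used in the 'maps' image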
1613 """
1614 check_params(locals())
1616 atlas_type = "deterministic"
1618 versions = ["SPM5", "SPM8", "SPM12", "3v2"]
1619 if version not in versions:
1620 raise ValueError(
1621 f"The version of AAL requested '{version}' does not exist."
1622 f"Please choose one among {versions}."
1623 )
1625 dataset_name = f"aal_{version}"
1626 opts = {"uncompress": True}
1628 if url is None:
1629 base_url = "https://www.gin.cnrs.fr/"
1630 if version == "SPM12":
1631 url = f"{base_url}AAL_files/aal_for_SPM12.tar.gz"
1632 basenames = ("AAL.nii", "AAL.xml")
1633 filenames = [
1634 (Path("aal", "atlas", f), url, opts) for f in basenames
1635 ]
1636 message = (
1637 "Starting in version 0.13, the default fetched mask will be"
1638 "AAL 3v2 instead."
1639 )
1640 warnings.warn(
1641 message, DeprecationWarning, stacklevel=find_stack_level()
1642 )
1644 elif version == "3v2":
1645 url = f"{base_url}wp-content/uploads/AAL3v2_for_SPM12.tar.gz"
1646 basenames = ("AAL3v1.nii", "AAL3v1.xml")
1647 filenames = [(Path("AAL3", f), url, opts) for f in basenames]
1648 else:
1649 url = f"{base_url}wp-content/uploads/aal_for_{version}.zip"
1650 basenames = ("ROI_MNI_V4.nii", "ROI_MNI_V4.txt")
1651 filenames = [
1652 (Path(f"aal_for_{version}", f), url, opts) for f in basenames
1653 ]
1655 data_dir = get_dataset_dir(
1656 dataset_name, data_dir=data_dir, verbose=verbose
1657 )
1658 atlas_img, labels_file = fetch_files(
1659 data_dir, filenames, resume=resume, verbose=verbose
1660 )
1661 fdescr = get_dataset_descr("aal")
1662 labels = ["Background"]
1663 indices = ["0"]
1664 if version in ("SPM12", "3v2"):
1665 xml_tree = ElementTree.parse(labels_file)
1666 root = xml_tree.getroot()
1667 for label in root.iter("label"):
1668 indices.append(label.find("index").text)
1669 labels.append(label.find("name").text)
1670 else:
1671 with Path(labels_file).open() as fp:
1672 for line in fp:
1673 _, label, index = line.strip().split("\t")
1674 indices.append(index)
1675 labels.append(label)
1676 fdescr = fdescr.replace("SPM 12", version)
1678 return Atlas(
1679 maps=atlas_img,
1680 labels=labels,
1681 description=fdescr,
1682 lut=generate_atlas_look_up_table(
1683 "fetch_atlas_aal",
1684 index=np.array([int(x) for x in indices]),
1685 name=labels,
1686 ),
1687 atlas_type=atlas_type,
1688 template="MNIColin27",
1689 indices=indices,
1690 )
1693@fill_doc
1694def fetch_atlas_basc_multiscale_2015(
1695 data_dir=None,
1696 url=None,
1697 resume=True,
1698 verbose=1,
1699 resolution=None,
1700 version="sym",
1701):
1702 """Download and load multiscale functional brain parcellations.
1704 This :term:`Deterministic atlas` includes group brain parcellations
1705 generated from resting-state
1706 :term:`functional magnetic resonance images<fMRI>` from about 200 young
1707 healthy subjects.
1709 Multiple resolutions (number of networks) are available, among
1710 7, 12, 20, 36, 64, 122, 197, 325, 444. The brain parcellations
1711 have been generated using a method called bootstrap analysis of
1712 stable clusters, known as BASC :footcite:t:`Bellec2010`,
1713 and the resolutions have been selected using a data-driven method
1714 called MSTEPS :footcite:t:`Bellec2013`.
1716 Note that two versions of the template are available, 'sym' or 'asym'.
1717 The 'asym' type contains brain images that have been registered in the
1718 asymmetric version of the :term:`MNI` brain template (reflecting that
1719 the brain is asymmetric), while the 'sym' type contains images registered
1720 in the symmetric version of the :term:`MNI` template.
1721 The symmetric template has been forced to be symmetric anatomically, and
1722 is therefore ideally suited to study homotopic functional connections in
1723 :term:`fMRI`: finding homotopic regions simply consists of flipping the
1724 x-axis of the template.
1726 .. versionadded:: 0.2.3
1728 Parameters
1729 ----------
1730 %(data_dir)s
1732 %(url)s
1734 %(resume)s
1736 %(verbose)s
1738 resolution : :obj:`int`, default=None
1739 Number of networks in the dictionary.
1740 Valid resolutions available are
1741 {7, 12, 20, 36, 64, 122, 197, 325, 444}
1743 version : {'sym', 'asym'}, default='sym'
1744 Available versions are 'sym' or 'asym'.
1745 By default all scales of brain parcellations of version 'sym'
1746 will be returned.
1748 Returns
1749 -------
1750 data : :class:`sklearn.utils.Bunch`
1751 Dictionary-like object, Keys are:
1753 - "scale007", "scale012", "scale020", "scale036", "scale064", \
1754 "scale122", "scale197", "scale325", "scale444": :obj:`str`
1755 Path to Nifti file of various scales of brain parcellations.
1756 Images have shape ``(53, 64, 52)`` and contain consecutive integer
1757 values from 0 to the selected number of networks (scale).
1759 - %(description)s
1761 - %(lut)s
1763 - %(template)s
1765 - %(atlas_type)s
1767 Warns
1768 -----
1769 DeprecationWarning
1770 If no resolution value is provided, the current behavior
1771 (returning multiple maps) is deprecated.
1772 Starting in version 0.13, a single map will be returned under a 'maps' key,
1773 selected according to the resolution and version values.
1775 References
1776 ----------
1777 .. footbibliography::
1779 Notes
1780 -----
1781 For more information on this dataset's structure, see
1782 https://figshare.com/articles/dataset/Group_multiscale_functional_template_generated_with_BASC_on_the_Cambridge_sample/1285615
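Examples
--------
A minimal usage sketch using the non-deprecated calling convention
(the first call downloads the archive):

.. code-block:: python

    from nilearn.datasets import fetch_atlas_basc_multiscale_2015

    basc = fetch_atlas_basc_multiscale_2015(resolution=64, version="sym")
    print(basc.maps)  # path to the 64-network parcellation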
1784 """
1785 check_params(locals())
1787 atlas_type = "deterministic"
1789 versions = ["sym", "asym"]
1790 if version not in versions:
1791 raise ValueError(
1792 f"The version of Brain parcellations requested '{version}' "
1793 "does not exist. "
1794 f"Please choose one among them {versions}."
1795 )
1797 file_number = "1861819" if version == "sym" else "1861820"
1798 url = f"https://ndownloader.figshare.com/files/{file_number}"
1800 opts = {"uncompress": True}
1802 keys = [
1803 "scale007",
1804 "scale012",
1805 "scale020",
1806 "scale036",
1807 "scale064",
1808 "scale122",
1809 "scale197",
1810 "scale325",
1811 "scale444",
1812 ]
1814 dataset_name = "basc_multiscale_2015"
1815 data_dir = get_dataset_dir(
1816 dataset_name, data_dir=data_dir, verbose=verbose
1817 )
1819 folder_name = Path(f"template_cambridge_basc_multiscale_nii_{version}")
1820 fdescr = get_dataset_descr(dataset_name)
1822 if resolution:
1823 basename = (
1824 "template_cambridge_basc_multiscale_"
1825 + version
1826 + f"_scale{resolution:03}"
1827 + ".nii.gz"
1828 )
1830 filename = [(folder_name / basename, url, opts)]
1832 data = fetch_files(data_dir, filename, resume=resume, verbose=verbose)
1834 labels = ["Background"] + [str(x) for x in range(1, resolution + 1)]
1836 return Atlas(
1837 maps=data[0],
1838 labels=labels,
1839 description=fdescr,
1840 lut=generate_atlas_look_up_table(
1841 "fetch_atlas_basc_multiscale_2015", name=labels
1842 ),
1843 atlas_type=atlas_type,
1844 template=f"MNI152{version}",
1845 )
1847 warnings.warn(
1848 category=DeprecationWarning,
1849 message=(
1850 deprecation_message.format(version="0.13")
1851 + (
1852 "To suppress this warning, "
1853 "Please use the parameters 'resolution' and 'version' "
1854 "to specify the exact atlas image you want."
1855 )
1856 ),
1857 stacklevel=find_stack_level(),
1858 )
1860 basenames = [
1861 "template_cambridge_basc_multiscale_" + version + "_" + key + ".nii.gz"
1862 for key in keys
1863 ]
1864 filenames = [(folder_name / basename, url, opts) for basename in basenames]
1865 data = fetch_files(data_dir, filenames, resume=resume, verbose=verbose)
1867 params = dict(zip(keys, data))
1868 params["description"] = fdescr
1869 params["atlas_type"] = atlas_type
1871 return Bunch(**params)
1874@fill_doc
1875def fetch_coords_dosenbach_2010(ordered_regions=True):
1876 """Load the Dosenbach et al 160 ROIs.
1878 These ROIs cover much of the cerebral cortex
1879 and cerebellum and are assigned to 6 networks.
1881 See :footcite:t:`Dosenbach2010`.
1883 Parameters
1884 ----------
1885 ordered_regions : :obj:`bool`, default=True
1886 ROIs from the same networks are grouped together and ordered with respect
1887 to their names and their locations (anterior to posterior).
1889 Returns
1890 -------
1891 data : :class:`sklearn.utils.Bunch`
1892 Dictionary-like object, contains:
1894 - 'rois': :class:`pandas.DataFrame` with the coordinates
1895 of the 160 ROIs in :term:`MNI` space.
1897 - %(labels)s
1899 - 'networks': :class:`numpy.ndarray` of :obj:`str`, list of network
1900 names for the 160 ROIs.
1902 - %(description)s
1904 References
1905 ----------
1906 .. footbibliography::
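Examples
--------
A minimal usage sketch (the coordinates ship with nilearn):

.. code-block:: python

    from nilearn.datasets import fetch_coords_dosenbach_2010

    dosenbach = fetch_coords_dosenbach_2010()
    coords = dosenbach.rois.to_numpy()
    print(coords.shape)  # (160, 3)
    print(dosenbach.labels[:3])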
1908 """
1909 dataset_name = "dosenbach_2010"
1910 fdescr = get_dataset_descr(dataset_name)
1911 csv = PACKAGE_DIRECTORY / "data" / "dosenbach_2010.csv"
1912 out_csv = pd.read_csv(csv)
1914 if ordered_regions:
1915 out_csv = out_csv.sort_values(by=["network", "name", "y"])
1917 # We add the ROI number to its name, since names are not unique
1918 names = out_csv["name"]
1919 numbers = out_csv["number"]
1920 labels = [f"{name} {number}" for (name, number) in zip(names, numbers)]
1921 params = {
1922 "rois": out_csv[["x", "y", "z"]],
1923 "labels": labels,
1924 "networks": out_csv["network"],
1925 "description": fdescr,
1926 }
1928 return Bunch(**params)
1931@fill_doc
1932def fetch_coords_seitzman_2018(ordered_regions=True):
1933 """Load the Seitzman et al. 300 ROIs.
1935 These ROIs cover cortical, subcortical and cerebellar regions and are
1936 assigned to one of 13 networks (Auditory, CinguloOpercular, DefaultMode,
1937 DorsalAttention, FrontoParietal, MedialTemporalLobe, ParietoMedial,
1938 Reward, Salience, SomatomotorDorsal, SomatomotorLateral, VentralAttention,
1939 Visual) and have a regional label (cortexL, cortexR, cerebellum, thalamus,
1940 hippocampus, basalGanglia, amygdala, cortexMid).
1942 See :footcite:t:`Seitzman2020`.
1944 .. versionadded:: 0.5.1
1946 Parameters
1947 ----------
1948 ordered_regions : :obj:`bool`, default=True
1949 ROIs from the same networks are grouped together and ordered with respect
1950 to their locations (anterior to posterior).
1952 Returns
1953 -------
1954 data : :class:`sklearn.utils.Bunch`
1955 Dictionary-like object, contains:
1957 - 'rois': :class:`pandas.DataFrame` with the coordinates
1958 of the 300 ROIs in :term:`MNI` space.
1960 - 'radius': :class:`numpy.ndarray` of :obj:`int`
1961 Radius of each ROI in mm.
1963 - 'networks': :class:`numpy.ndarray` of :obj:`str`
1964 Names of the corresponding network for each ROI.
1966 - 'regions': :class:`numpy.ndarray` of :obj:`str`
1967 Names of the regions.
1969 - %(description)s
1971 References
1972 ----------
1973 .. footbibliography::
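Examples
--------
A minimal usage sketch (the coordinates ship with nilearn):

.. code-block:: python

    from nilearn.datasets import fetch_coords_seitzman_2018

    seitzman = fetch_coords_seitzman_2018()
    print(seitzman.rois.shape)  # (300, 3)
    print(seitzman.networks[:3], seitzman.radius[:3])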
1975 """
1976 dataset_name = "seitzman_2018"
1977 fdescr = get_dataset_descr(dataset_name)
1978 roi_file = (
1979 PACKAGE_DIRECTORY
1980 / "data"
1981 / "seitzman_2018_ROIs_300inVol_MNI_allInfo.txt"
1982 )
1983 anatomical_file = (
1984 PACKAGE_DIRECTORY / "data" / "seitzman_2018_ROIs_anatomicalLabels.txt"
1985 )
1987 rois = pd.read_csv(roi_file, delimiter=" ")
1988 rois = rois.rename(columns={"netName": "network", "radius(mm)": "radius"})
1990 # get integer regional labels and convert to text labels with mapping
1991 # from header line
1992 with anatomical_file.open() as fi:
1993 header = fi.readline()
1994 region_mapping = {}
1995 for r in header.strip().split(","):
1996 i, region = r.split("=")
1997 region_mapping[int(i)] = region
1999 anatomical = np.genfromtxt(anatomical_file, skip_header=1, encoding=None)
2000 anatomical_names = np.array([region_mapping[a] for a in anatomical])
2002 rois = pd.concat([rois, pd.DataFrame(anatomical_names)], axis=1)
2003 rois.columns = [*rois.columns[:-1], "region"]
2005 if ordered_regions:
2006 rois = rois.sort_values(by=["network", "y"])
2008 params = {
2009 "rois": rois[["x", "y", "z"]],
2010 "radius": np.array(rois["radius"]),
2011 "networks": np.array(rois["network"]),
2012 "regions": np.array(rois["region"]),
2013 "description": fdescr,
2014 }
2016 return Bunch(**params)
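# A brief usage sketch for the fetcher above; ``_example_seitzman_spheres`` is an
# illustrative name only. It shows one way the returned coordinates and radii could
# be combined, without asserting any particular downstream workflow.
def _example_seitzman_spheres():
    """Return per-ROI (x, y, z, radius) rows for the 300 Seitzman ROIs."""
    seitzman = fetch_coords_seitzman_2018(ordered_regions=True)
    coords = seitzman.rois.to_numpy()        # shape (300, 3), MNI space
    radii = seitzman.radius.reshape(-1, 1)   # sphere radius in mm for each ROI
    return np.hstack([coords, radii])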
2019@fill_doc
2020def fetch_atlas_allen_2011(data_dir=None, url=None, resume=True, verbose=1):
2021 """Download and return file names for the Allen and MIALAB :term:`ICA` \
2022 :term:`Probabilistic atlas` (dated 2011).
2024 See :footcite:t:`Allen2011`.
2026 The provided images are in MNI152 space.
2028 Parameters
2029 ----------
2030 %(data_dir)s
2031 %(url)s
2032 %(resume)s
2033 %(verbose)s
2035 Returns
2036 -------
2037 data : :class:`sklearn.utils.Bunch`
2038 Dictionary-like object, keys are:
2040 - 'maps': :obj:`str`
2041 Path to nifti file containing the
2042 T-maps of all 75 unthresholded components.
2043 The image has shape ``(53, 63, 46, 75)``.
2045 - 'rsn28': :obj:`str`
2046 Path to nifti file containing the
2047 T-maps of 28 RSNs included in :footcite:t:`Allen2011`.
2048 The image has shape ``(53, 63, 46, 28)``.
2050 - 'networks': :obj:`list` of :obj:`list` of :obj:`str`
2051 One sub-list per network, repeating the network name once per component map (28 names in total).
2053 - 'rsn_indices': :obj:`list` of :obj:`tuple`, each tuple is a \
2054 (:obj:`str`, :obj:`list` of :obj:`int`).
2055 This maps the network names to the map indices.
2056 For example, the map indices for the 'Visual' network
2057 can be obtained:
2059 .. code-block:: python
2061 # Should return [46, 64, 67, 48, 39, 59]
2062 dict(data.rsn_indices)["Visual"]
2064 - 'comps': :obj:`str`
2065 Path to nifti file containing the aggregate :term:`ICA` components.
2067 - %(description)s
2069 - %(atlas_type)s
2071 - %(template)s
2073 References
2074 ----------
2075 .. footbibliography::
2077 Notes
2078 -----
2079 License: unknown
2081 See https://trendscenter.org/data/ for more information
2082 on this dataset.
2084 """
2085 check_params(locals())
2087 atlas_type = "probabilistic"
2089 if url is None:
2090 url = "https://osf.io/hrcku/download"
2092 dataset_name = "allen_rsn_2011"
2093 keys = ("maps", "rsn28", "comps")
2095 opts = {"uncompress": True}
2096 files = [
2097 "ALL_HC_unthresholded_tmaps.nii.gz",
2098 "RSN_HC_unthresholded_tmaps.nii.gz",
2099 "rest_hcp_agg__component_ica_.nii.gz",
2100 ]
2102 labels = [
2103 ("Basal Ganglia", [21]),
2104 ("Auditory", [17]),
2105 ("Sensorimotor", [7, 23, 24, 38, 56, 29]),
2106 ("Visual", [46, 64, 67, 48, 39, 59]),
2107 ("Default-Mode", [50, 53, 25, 68]),
2108 ("Attentional", [34, 60, 52, 72, 71, 55]),
2109 ("Frontal", [42, 20, 47, 49]),
2110 ]
2112 networks = [[name] * len(idxs) for name, idxs in labels]
2114 filenames = [(Path("allen_rsn_2011", f), url, opts) for f in files]
2116 data_dir = get_dataset_dir(
2117 dataset_name, data_dir=data_dir, verbose=verbose
2118 )
2119 sub_files = fetch_files(
2120 data_dir, filenames, resume=resume, verbose=verbose
2121 )
2123 fdescr = get_dataset_descr(dataset_name)
2125 params = [
2126 ("description", fdescr),
2127 ("atlas_type", atlas_type),
2128 ("rsn_indices", labels),
2129 ("networks", networks),
2130 ("template", "MNI152"),
2131 *list(zip(keys, sub_files)),
2132 ]
2133 return Bunch(**dict(params))
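# A small sketch of how the Bunch returned above can be consumed; the helper name
# ``_example_allen_visual_maps`` is illustrative, not part of the API. It assumes the
# download succeeded and that the documented 'rsn_indices' values index volumes of
# the 'maps' image.
def _example_allen_visual_maps(data_dir=None):
    """Return a 4D image restricted to the 'Visual' network components."""
    from nilearn.image import index_img

    allen = fetch_atlas_allen_2011(data_dir=data_dir)
    # Map indices for the 'Visual' network, as documented in the docstring above.
    visual_indices = dict(allen.rsn_indices)["Visual"]
    return index_img(allen.maps, visual_indices)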
2136@fill_doc
2137def fetch_atlas_surf_destrieux(
2138 data_dir=None, url=None, resume=True, verbose=1
2139):
2140 """Download and load Destrieux et al, 2010 cortical \
2141 :term:`Deterministic atlas`.
2143 See :footcite:t:`Destrieux2010`.
2145 This atlas returns 76 labels per hemisphere based on sulco-gyral patterns
2146 as distributed with FreeSurfer in fsaverage5 surface space.
2148 .. versionadded:: 0.3
2150 Parameters
2151 ----------
2152 %(data_dir)s
2153 %(url)s
2154 %(resume)s
2155 %(verbose)s
2157 Returns
2158 -------
2159 data : :class:`sklearn.utils.Bunch`
2160 Dictionary-like object, contains:
2162 - %(labels)s
2164 - 'map_left': :class:`numpy.ndarray` of :obj:`int`
2165 Maps each vertex on the left hemisphere
2166 of the fsaverage5 surface to its index
2167 into the list of label names.
2169 - 'map_right': :class:`numpy.ndarray` of :obj:`int`
2170 Maps each :term:`vertex` on the right hemisphere
2171 of the fsaverage5 surface to its index
2172 into the list of label names.
2174 - %(description)s
2176 - %(lut)s
2178 - %(template)s
2180 - %(atlas_type)s
2182 See Also
2183 --------
2184 nilearn.datasets.fetch_surf_fsaverage
2186 References
2187 ----------
2188 .. footbibliography::
2190 """
2191 check_params(locals())
2193 atlas_type = "deterministic"
2195 if url is None:
2196 url = "https://www.nitrc.org/frs/download.php/"
2198 dataset_name = "destrieux_surface"
2199 fdescr = get_dataset_descr(dataset_name)
2200 data_dir = get_dataset_dir(
2201 dataset_name, data_dir=data_dir, verbose=verbose
2202 )
2204 # Download annot files, fsaverage surfaces and sulcal information
2205 annot_file = "%s.aparc.a2009s.annot"
2206 annot_url = url + "%i/%s.aparc.a2009s.annot"
2207 annot_nids = {"lh annot": 9343, "rh annot": 9342}
2209 annots = []
2210 for hemi in [("lh", "left"), ("rh", "right")]:
2211 annot = fetch_files(
2212 data_dir,
2213 [
2214 (
2215 annot_file % (hemi[1]),
2216 annot_url % (annot_nids[f"{hemi[0]} annot"], hemi[0]),
2217 {"move": annot_file % (hemi[1])},
2218 )
2219 ],
2220 resume=resume,
2221 verbose=verbose,
2222 )[0]
2223 annots.append(annot)
2225 annot_left = freesurfer.read_annot(annots[0])
2226 annot_right = freesurfer.read_annot(annots[1])
2228 labels = [x.decode("utf-8") for x in annot_left[2]]
2229 lut = generate_atlas_look_up_table(
2230 "fetch_atlas_surf_destrieux", name=labels
2231 )
2232 check_look_up_table(lut=lut, atlas=annot_left[0])
2233 check_look_up_table(lut=lut, atlas=annot_right[0])
2235 return Bunch(
2236 labels=labels,
2237 map_left=annot_left[0],
2238 map_right=annot_right[0],
2239 description=fdescr,
2240 lut=lut,
2241 atlas_type=atlas_type,
2242 template="fsaverage",
2243 )
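# A usage sketch for the surface atlas above; ``_example_destrieux_left_labels`` is an
# illustrative name only and assumes the annot files were downloaded successfully.
def _example_destrieux_left_labels(data_dir=None):
    """Look up the Destrieux label name of each left-hemisphere fsaverage5 vertex."""
    destrieux = fetch_atlas_surf_destrieux(data_dir=data_dir)
    # map_left holds one integer per vertex, indexing into destrieux.labels
    return [destrieux.labels[index] for index in destrieux.map_left]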
2246def _separate_talairach_levels(atlas_img, labels, output_dir, verbose):
2247 """Separate the multiple annotation levels in talairach raw atlas.
2249 The Talairach atlas has five levels of annotation: hemisphere, lobe, gyrus,
2250 tissue, brodmann area. They are mixed up in the original atlas: each label
2251 in the atlas corresponds to a 5-tuple containing, for each of these levels,
2252 a value or the string '*' (meaning undefined, background).
2254 This function disentangles the levels, and stores each in a separate image.
2256 The label '*' is replaced by 'Background' for clarity.
2257 """
2258 logger.log(
2259 f"Separating talairach atlas levels: {_TALAIRACH_LEVELS}",
2260 verbose=verbose,
2261 )
2262 for level_name, old_level_labels in zip(
2263 _TALAIRACH_LEVELS, np.asarray(labels).T
2264 ):
2265 logger.log(level_name, verbose=verbose)
2266 # level with most regions, ba, has 72 regions
2267 level_data = np.zeros(atlas_img.shape, dtype="uint8")
2268 level_labels = {"*": 0}
2269 for region_nb, region_name in enumerate(old_level_labels):
2270 level_labels.setdefault(region_name, len(level_labels))
2271 level_data[get_img_data(atlas_img) == region_nb] = level_labels[
2272 region_name
2273 ]
2274 new_img_like(atlas_img, level_data).to_filename(
2275 output_dir / f"{level_name}.nii.gz"
2276 )
2278 level_labels = list(level_labels.keys())
2279 # rename '*' -> 'Background'
2280 level_labels[0] = "Background"
2281 (output_dir / f"{level_name}-labels.json").write_text(
2282 json.dumps(level_labels), "utf-8"
2283 )
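# A toy sketch of the label disentangling performed above; ``_example_split_levels``
# is illustrative only, operates on plain 5-tuples rather than a NIfTI image, and the
# example label values are placeholders in the spirit of the Talairach labels.
def _example_split_levels():
    """Show how mixed 5-tuple labels map to per-level label dictionaries."""
    labels = [
        ["*", "*", "*", "*", "*"],  # background entry
        ["Left Cerebrum", "Frontal Lobe", "Precentral Gyrus",
         "Gray Matter", "Brodmann area 4"],
    ]
    per_level = {}
    for level_name, level_values in zip(
        _TALAIRACH_LEVELS, np.asarray(labels).T
    ):
        level_labels = {"*": 0}
        for value in level_values:
            level_labels.setdefault(value, len(level_labels))
        per_level[level_name] = level_labels
    return per_level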
2286def _download_talairach(talairach_dir, verbose):
2287 """Download the Talairach atlas and separate the different levels."""
2288 atlas_url = "https://www.talairach.org/talairach.nii"
2289 temp_dir = mkdtemp()
2290 try:
2291 temp_file = fetch_files(
2292 temp_dir, [("talairach.nii", atlas_url, {})], verbose=verbose
2293 )[0]
2294 atlas_img = load(temp_file, mmap=False)
2295 atlas_img = check_niimg(atlas_img)
2296 finally:
2297 shutil.rmtree(temp_dir)
2298 labels_text = atlas_img.header.extensions[0].get_content()
2299 multi_labels = labels_text.strip().decode("utf-8").split("\n")
2300 labels = [lab.split(".") for lab in multi_labels]
2301 _separate_talairach_levels(
2302 atlas_img, labels, talairach_dir, verbose=verbose
2303 )
2306@fill_doc
2307def fetch_atlas_talairach(level_name, data_dir=None, verbose=1):
2308 """Download the Talairach :term:`Deterministic atlas`.
2310 For more information, see :footcite:t:`talairach_atlas`,
2311 :footcite:t:`Lancaster2000`,
2312 and :footcite:t:`Lancaster1997`.
2314 .. versionadded:: 0.4.0
2316 Parameters
2317 ----------
2318 level_name : {'hemisphere', 'lobe', 'gyrus', 'tissue', 'ba'}
2319 Which level of the atlas to use: the hemisphere, the lobe, the gyrus,
2320 the tissue type or the Brodmann area.
2321 %(data_dir)s
2322 %(verbose)s
2324 Returns
2325 -------
2326 data : :class:`sklearn.utils.Bunch`
2327 Dictionary-like object, contains:
2329 - 'maps': 3D :class:`~nibabel.nifti1.Nifti1Image`
2330 The image has
2331 shape ``(141, 172, 110)`` and contains consecutive integer
2332 values from 0 to the number of regions, which are indices
2333 in the list of labels.
2335 - %(labels)s
2337 The list starts with 'Background' (region ID 0 in the image).
2339 - %(description)s
2341 - %(lut)s
2343 - %(template)s
2345 - %(atlas_type)s
2347 References
2348 ----------
2349 .. footbibliography::
2351 """
2352 check_params(locals())
2354 atlas_type = "deterministic"
2356 if level_name not in _TALAIRACH_LEVELS:
2357 raise ValueError(f'"level_name" should be one of {_TALAIRACH_LEVELS}')
2358 talairach_dir = get_dataset_dir(
2359 "talairach_atlas", data_dir=data_dir, verbose=verbose
2360 )
2362 img_file = talairach_dir / f"{level_name}.nii.gz"
2363 labels_file = talairach_dir / f"{level_name}-labels.json"
2365 if not img_file.is_file() or not labels_file.is_file():
2366 _download_talairach(talairach_dir, verbose=verbose)
2368 atlas_img = check_niimg(img_file)
2369 labels = json.loads(labels_file.read_text("utf-8"))
2371 return Atlas(
2372 maps=atlas_img,
2373 labels=labels,
2374 description=get_dataset_descr("talairach_atlas").format(level_name),
2375 lut=generate_atlas_look_up_table("fetch_atlas_talairach", name=labels),
2376 atlas_type=atlas_type,
2377 template="Talairach",
2378 )
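# A short usage sketch for the fetcher above; ``_example_talairach_ba`` is an
# illustrative name only. It assumes the atlas can be downloaded (or is cached)
# and uses only the documented 'maps' and 'labels' keys.
def _example_talairach_ba(data_dir=None):
    """Return the Brodmann-area image and the number of non-background labels."""
    talairach = fetch_atlas_talairach("ba", data_dir=data_dir)
    n_regions = len(talairach.labels) - 1  # first label is 'Background'
    return talairach.maps, n_regions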
2381@rename_parameters(
2382 replacement_params={"version": "atlas_type"}, end_version="0.13.1"
2383)
2384@fill_doc
2385def fetch_atlas_pauli_2017(
2386 atlas_type="probabilistic", data_dir=None, verbose=1
2387):
2388 """Download the Pauli et al. (2017) atlas.
2390 This atlas has 12 subcortical nodes in total. See
2391 :footcite:t:`pauli_atlas` and :footcite:t:`Pauli2018`.
2393 Parameters
2394 ----------
2395 atlas_type : {'probabilistic', 'deterministic'}, default='probabilistic'
2396 Which type of atlas should be downloaded. This can be
2397 'probabilistic' for the :term:`Probabilistic atlas`, or 'deterministic'
2398 for the :term:`Deterministic atlas`.
2399 %(data_dir)s
2400 %(verbose)s
2402 Returns
2403 -------
2404 data : :class:`sklearn.utils.Bunch`
2405 Dictionary-like object, contains:
2407 - 'maps': :obj:`str`,
2408 path to nifti file containing the
2409 :class:`~nibabel.nifti1.Nifti1Image`.
2410 If ``atlas_type='probabilistic'``,
2411 the image shape is ``(193, 229, 193, 16)``.
2412 If ``atlas_type='deterministic'`` the image shape is
2413 ``(198, 263, 212)``, and values are indices in the list of labels
2414 (integers from 0 to 16).
2416 - %(labels)s
2417 The list contains values for both
2418 :term:`probabilistic<Probabilistic atlas>` and
2419 :term:`deterministic<Deterministic atlas>` types.
2421 - %(description)s
2423 - %(lut)s
2424 Only returned when ``atlas_type="deterministic"``.
2426 - %(template)s
2428 - %(atlas_type)s
2431 Warns
2432 -----
2433 DeprecationWarning
2434 The possible values for atlas_type are currently 'prob' and 'det'. From
2435 release 0.13.0 onwards, atlas_type will accept only 'probabilistic' or
2436 'deterministic' as value.
2438 References
2439 ----------
2440 .. footbibliography::
2442 """
2443 check_params(locals())
2445 # TODO: remove this part after release 0.13.0
2446 if atlas_type in ("prob", "det"):
2447 atlas_type_values = (
2448 "The possible values for atlas_type are currently 'prob' and"
2449 " 'det'. From release 0.13.0 onwards, atlas_type will accept only"
2450 " 'probabilistic' or 'deterministic' as value."
2451 )
2452 warnings.warn(
2453 category=DeprecationWarning,
2454 message=atlas_type_values,
2455 stacklevel=find_stack_level(),
2456 )
2457 atlas_type = (
2458 "probabilistic" if atlas_type == "prob" else "deterministic"
2459 )
2461 if atlas_type not in {"probabilistic", "deterministic"}:
2462 raise NotImplementedError(
2463 f"{atlas_type} is not a valid type for the Pauli atlas"
2464 )
2466 url_maps = "https://osf.io/w8zq2/download"
2467 filename = "pauli_2017_prob.nii.gz"
2468 if atlas_type == "deterministic":
2469 url_maps = "https://osf.io/5mqfx/download"
2470 filename = "pauli_2017_det.nii.gz"
2472 url_labels = "https://osf.io/6qrcb/download"
2473 dataset_name = "pauli_2017"
2475 data_dir = get_dataset_dir(
2476 dataset_name, data_dir=data_dir, verbose=verbose
2477 )
2479 files = [
2480 (filename, url_maps, {"move": filename}),
2481 ("labels.txt", url_labels, {"move": "labels.txt"}),
2482 ]
2483 atlas_file, labels = fetch_files(data_dir, files)
2485 labels = np.loadtxt(labels, dtype=str)[:, 1].tolist()
2487 return Atlas(
2488 maps=atlas_file,
2489 labels=labels,
2490 description=get_dataset_descr(dataset_name),
2491 lut=generate_atlas_look_up_table(
2492 "fetch_atlas_pauli_2017", name=labels
2493 ),
2494 atlas_type=atlas_type,
2495 )
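# A usage sketch contrasting the two atlas types returned above; the helper name
# ``_example_pauli_both_types`` is illustrative only and assumes both downloads
# succeed (or are cached).
def _example_pauli_both_types(data_dir=None):
    """Fetch the deterministic and probabilistic Pauli atlases side by side."""
    deterministic = fetch_atlas_pauli_2017("deterministic", data_dir=data_dir)
    probabilistic = fetch_atlas_pauli_2017("probabilistic", data_dir=data_dir)
    # Only the deterministic variant carries a 'lut' entry (see the docstring above).
    return deterministic, probabilistic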
2498@fill_doc
2499def fetch_atlas_schaefer_2018(
2500 n_rois=400,
2501 yeo_networks=7,
2502 resolution_mm=1,
2503 data_dir=None,
2504 base_url=None,
2505 resume=True,
2506 verbose=1,
2507):
2508 """Download and return file names for the Schaefer 2018 parcellation.
2510 .. versionadded:: 0.5.1
2512 This function returns a :term:`Deterministic atlas`, and the provided
2513 images are in MNI152 space.
2515 For more information on this dataset, see :footcite:t:`schaefer_atlas`,
2516 :footcite:t:`Schaefer2017`,
2517 and :footcite:t:`Yeo2011`.
2519 Parameters
2520 ----------
2521 n_rois : {100, 200, 300, 400, 500, 600, 700, 800, 900, 1000}, default=400
2522 Number of regions of interest.
2524 yeo_networks : {7, 17}, default=7
2525 ROI annotation according to the Yeo networks.
2527 resolution_mm : {1, 2}, default=1mm
2528 Spatial resolution of atlas image in mm.
2529 %(data_dir)s
2530 base_url : :obj:`str`, default=None
2531 Base URL of files to download (``None`` results in
2532 default ``base_url``).
2533 %(resume)s
2534 %(verbose)s
2536 Returns
2537 -------
2538 data : :class:`sklearn.utils.Bunch`
2539 Dictionary-like object, contains:
2541 - 'maps': :obj:`str`, path to nifti file containing the
2542 3D :class:`~nibabel.nifti1.Nifti1Image` (its shape is
2543 ``(182, 218, 182)``).
2544 The values are consecutive integers
2545 between 0 and ``n_rois`` which can be interpreted as indices
2546 in the list of labels.
2548 - %(labels)s
2550 - %(description)s
2552 - %(lut)s
2554 - %(template)s
2556 - %(atlas_type)s
2558 References
2559 ----------
2560 .. footbibliography::
2563 Notes
2564 -----
2565 Release v0.14.3 of the Schaefer 2018 parcellation is used by
2566 default. Versions prior to v0.14.3 are known to contain erroneous region
2567 label names. For more details, see
2568 https://github.com/ThomasYeoLab/CBIG/blob/master/stable_projects/brain_parcellation/Schaefer2018_LocalGlobal/Parcellations/Updates/Update_20190916_README.md
2570 License: MIT.
2572 """
2573 check_params(locals())
2575 atlas_type = "deterministic"
2577 valid_n_rois = list(range(100, 1100, 100))
2578 valid_yeo_networks = [7, 17]
2579 valid_resolution_mm = [1, 2]
2580 if n_rois not in valid_n_rois:
2581 raise ValueError(
2582 f"Requested n_rois={n_rois} not available. "
2583 f"Valid options: {valid_n_rois}"
2584 )
2585 if yeo_networks not in valid_yeo_networks:
2586 raise ValueError(
2587 f"Requested yeo_networks={yeo_networks} not available. "
2588 f"Valid options: {valid_yeo_networks}"
2589 )
2590 if resolution_mm not in valid_resolution_mm:
2591 raise ValueError(
2592 f"Requested resolution_mm={resolution_mm} not available. "
2593 f"Valid options: {valid_resolution_mm}"
2594 )
2596 if base_url is None:
2597 base_url = (
2598 "https://raw.githubusercontent.com/ThomasYeoLab/CBIG/"
2599 "v0.14.3-Update_Yeo2011_Schaefer2018_labelname/"
2600 "stable_projects/brain_parcellation/"
2601 "Schaefer2018_LocalGlobal/Parcellations/MNI/"
2602 )
2604 labels_file_template = "Schaefer2018_{}Parcels_{}Networks_order.txt"
2605 img_file_template = (
2606 "Schaefer2018_{}Parcels_{}Networks_order_FSLMNI152_{}mm.nii.gz"
2607 )
2608 files = [
2609 (f, base_url + f, {})
2610 for f in [
2611 labels_file_template.format(n_rois, yeo_networks),
2612 img_file_template.format(n_rois, yeo_networks, resolution_mm),
2613 ]
2614 ]
2616 dataset_name = "schaefer_2018"
2617 data_dir = get_dataset_dir(
2618 dataset_name, data_dir=data_dir, verbose=verbose
2619 )
2620 labels_file, atlas_file = fetch_files(
2621 data_dir, files, resume=resume, verbose=verbose
2622 )
2624 lut = pd.read_csv(
2625 labels_file,
2626 delimiter="\t",
2627 names=["index", "name", "r", "g", "b", "fs"],
2628 )
2629 lut = _update_lut_freesurder(lut)
2631 return Atlas(
2632 maps=atlas_file,
2633 labels=list(lut["name"]),
2634 description=get_dataset_descr(dataset_name),
2635 lut=lut,
2636 atlas_type=atlas_type,
2637 template="MNI152NLin6Asym",
2638 )
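# A usage sketch for the fetcher above; ``_example_schaefer_400`` is an illustrative
# name only. It assumes the GitHub-hosted files can be downloaded (or are cached).
def _example_schaefer_400(data_dir=None):
    """Fetch the 400-ROI, 7-network Schaefer parcellation at 2 mm resolution."""
    schaefer = fetch_atlas_schaefer_2018(
        n_rois=400, yeo_networks=7, resolution_mm=2, data_dir=data_dir
    )
    # 'maps' is a path to the labeled 3D image; 'labels' holds the region names.
    return schaefer.maps, schaefer.labels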
2641class Atlas(Bunch):
2642 """Sub class of Bunch to help standardize atlases.
2644 Parameters
2645 ----------
2646 maps : Niimg-like object or SurfaceImage object
2647 single image or list of images for that atlas
2649 description : str
2650 atlas description
2652 atlas_type : {"deterministic", "probabilistic"}
2654 labels : list of str
2655 labels for the atlas
2657 lut : pandas.DataFrame
2658 look-up table for the atlas
2660 template : str
2661 name of the template used for the atlas
2662 """
2664 def __init__(
2665 self,
2666 maps,
2667 description,
2668 atlas_type,
2669 labels=None,
2670 lut=None,
2671 template=None,
2672 **kwargs,
2673 ):
2674 assert atlas_type in ["probabilistic", "deterministic"]
2676 # TODO: improve
2677 if template is None:
2678 template = "MNI?"
2680 if atlas_type == "probabilistic":
2681 if labels is None:
2682 super().__init__(
2683 maps=maps,
2684 description=description,
2685 atlas_type=atlas_type,
2686 template=template,
2687 **kwargs,
2688 )
2689 else:
2690 super().__init__(
2691 maps=maps,
2692 labels=labels,
2693 description=description,
2694 atlas_type=atlas_type,
2695 template=template,
2696 **kwargs,
2697 )
2699 return None
2701 check_look_up_table(lut=lut, atlas=maps)
2703 super().__init__(
2704 maps=maps,
2705 labels=lut.name.to_list(),
2706 description=description,
2707 lut=lut,
2708 atlas_type=atlas_type,
2709 template=template,
2710 **kwargs,
2711 )
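# A minimal construction sketch for the Atlas container above; the path, labels, and
# template used here are placeholders, not a real dataset, and ``_example_atlas_container``
# is an illustrative name only.
def _example_atlas_container():
    """Build a probabilistic Atlas Bunch from already-known pieces."""
    return Atlas(
        maps="/path/to/probabilistic_atlas_4d.nii.gz",
        description="toy description",
        atlas_type="probabilistic",
        labels=["region A", "region B"],
        template="MNI152",
    )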