Coverage for nilearn/glm/second_level/second_level.py: 12%

373 statements  

« prev     ^ index     » next       coverage.py v7.9.1, created at 2025-06-20 10:58 +0200

1"""Provide facilities to realize a second level analysis on lists of \ 

2first level contrasts or directly on fitted first level models. 

3""" 

4 

5import operator 

6import time 

7from pathlib import Path 

8from warnings import warn 

9 

10import numpy as np 

11import pandas as pd 

12from joblib import Memory 

13from nibabel import Nifti1Image 

14from nibabel.funcs import four_to_three 

15from sklearn.base import clone 

16from sklearn.utils.estimator_checks import check_is_fitted 

17 

18from nilearn._utils import fill_doc, logger 

19from nilearn._utils.cache_mixin import check_memory 

20from nilearn._utils.glm import check_and_load_tables 

21from nilearn._utils.logger import find_stack_level 

22from nilearn._utils.masker_validation import ( 

23 check_compatibility_mask_and_images, 

24 check_embedded_masker, 

25) 

26from nilearn._utils.niimg_conversions import check_niimg 

27from nilearn._utils.param_validation import check_params 

28from nilearn.glm._base import BaseGLM 

29from nilearn.glm.contrasts import ( 

30 compute_contrast, 

31 expression_to_contrast_vector, 

32) 

33from nilearn.glm.first_level import FirstLevelModel, run_glm 

34from nilearn.glm.first_level.design_matrix import ( 

35 make_second_level_design_matrix, 

36) 

37from nilearn.glm.regression import RegressionResults, SimpleRegressionResults 

38from nilearn.image import concat_imgs, iter_img, mean_img 

39from nilearn.maskers import NiftiMasker, SurfaceMasker 

40from nilearn.mass_univariate import permuted_ols 

41from nilearn.surface.surface import ( 

42 SurfaceImage, 

43) 

44from nilearn.surface.utils import check_polymesh_equal 

45from nilearn.typing import NiimgLike 

46 

47 

def _input_type_error_message(second_level_input):
    """Build the TypeError message listing every accepted input type."""
    accepted_inputs = "\n".join(
        [
            "second_level_input must be either:",
            "- a pandas DataFrame,",
            "- a Niimg-like object",
            "- a pandas Series of Niimg-like object",
            "- a list of Niimg-like objects",
            "- a list of 2D SurfaceImage objects",
            "- a 3D SurfaceImage object",
            "- a list of FirstLevelModel objects.",
        ]
    )
    return f"{accepted_inputs}\nGot {_return_type(second_level_input)} instead."

60 

61 

def _check_second_level_input(
    second_level_input, design_matrix, confounds=None
):
    """Validate second_level_input together with its design matrix."""
    # The design matrix must be of a valid type before anything else.
    _check_design_matrix(design_matrix)

    _check_input_as_type(
        second_level_input,
        _check_input_type(second_level_input),
        confounds is None,
        design_matrix is None,
    )

75 

76 

def _check_input_type(second_level_input):
    """Return a short tag describing which kind of input was provided."""
    # Order matters: DataFrame must be tested before Series, and the
    # pandas types before the image types.
    if isinstance(second_level_input, pd.DataFrame):
        return "df_object"
    elif isinstance(second_level_input, pd.Series):
        return "pd_series"
    elif isinstance(second_level_input, NiimgLike):
        return "nii_object"
    elif isinstance(second_level_input, SurfaceImage):
        return "surf_img_object"
    elif isinstance(second_level_input, list):
        # Lists need a closer look at their elements.
        return _check_input_type_when_list(second_level_input)
    raise TypeError(_input_type_error_message(second_level_input))

90 

91 

def _return_type(second_level_input):
    """Return the type of the input (element types, for a list)."""
    if not isinstance(second_level_input, list):
        return type(second_level_input)
    return [type(element) for element in second_level_input]

97 

98 

def _check_input_type_when_list(second_level_input):
    """Return the input tag when second_level_input is a list."""
    if len(second_level_input) < 2:
        raise TypeError(
            "A second level model requires a list with at"
            " least two first level models or niimgs or surface images."
        )

    _check_all_elements_of_same_type(second_level_input)

    # Elements are homogeneous, so the first one is representative.
    first_element = second_level_input[0]
    if isinstance(first_element, NiimgLike):
        return "nii_object"
    if isinstance(first_element, FirstLevelModel):
        return "flm_object"
    if isinstance(first_element, SurfaceImage):
        return "surf_img_object"
    raise TypeError(_input_type_error_message(second_level_input))

117 

118 

def _check_all_elements_of_same_type(data):
    """Raise if any element of ``data`` differs in type from the first one.

    Subclasses of the first element's type are accepted, as with
    :func:`isinstance` (e.g. ``bool`` elements after an ``int`` first
    element pass the check).

    Parameters
    ----------
    data : sequence
        Non-empty sequence whose elements must all be instances of
        ``type(data[0])``.

    Raises
    ------
    TypeError
        If an element is not an instance of ``type(data[0])``.
    """
    # Hoist the reference type out of the loop; also rename the loop
    # variable, which previously shadowed the builtin ``input``.
    reference_type = type(data[0])
    for idx, element in enumerate(data):
        if not isinstance(element, reference_type):
            raise TypeError(
                "Elements of second_level_input must be of the same type."
                f" Got object type {type(element)} at idx {idx}."
            )

126 

127 

def _check_input_as_type(
    second_level_input, input_type, none_confounds, none_design_matrix
):
    """Dispatch validation according to the detected input type."""
    if input_type == "flm_object":
        _check_input_as_first_level_model(second_level_input, none_confounds)
        return
    if input_type == "pd_series":
        # A Series of images is validated like a plain list of niimgs.
        _check_input_as_nifti_images(
            second_level_input.to_list(), none_design_matrix
        )
        return
    if input_type == "nii_object":
        _check_input_as_nifti_images(second_level_input, none_design_matrix)
        return
    if input_type == "surf_img_object":
        _check_input_as_surface_images(second_level_input, none_design_matrix)
        return
    # Remaining case: a pandas DataFrame.
    _check_input_as_dataframe(second_level_input)

142 

143 

# Tolerance used when comparing affines of first-level models:
# differences up to ~1000 float32 ulps are treated as equal.
INF = 1000 * np.finfo(np.float32).eps

145 

146 

def _check_input_as_first_level_model(second_level_input, none_confounds):
    """Check that all first level models are valid.

    - must have been fit
    - must all have a subject label in case confounds are passed
    - for volumetric analysis
      - must all have the same affine / shape
        (checking all against those of the first model)

    """
    # Reference affine / shape taken from the first model that exposes them.
    ref_affine = None
    ref_shape = None

    for model_idx, first_level in enumerate(second_level_input):
        if not first_level.__sklearn_is_fitted__():
            raise ValueError(
                f"Model {first_level.subject_label} "
                f"at index {model_idx} has not been fit yet."
            )
        # Confounds are matched to models via subject_label, so every
        # model must carry one when confounds are provided.
        if not none_confounds and first_level.subject_label is None:
            raise ValueError(
                "In case confounds are provided, "
                "first level objects need to provide "
                "the attribute 'subject_label' to match rows appropriately.\n"
                f"Model at idx {model_idx} does not provide it. "
                "To set it, you can do first_level.subject_label = '01'"
            )

        # Extract affine/shape from the model's mask, whether it is a
        # fitted NiftiMasker or a plain Nifti1Image.
        affine = None
        shape = None
        if first_level.mask_img is not None:
            if isinstance(first_level.mask_img, NiftiMasker):
                affine = first_level.mask_img.affine_
                shape = first_level.mask_img.mask_img_.shape
            elif isinstance(first_level.mask_img, Nifti1Image):
                affine = first_level.mask_img.affine
                shape = first_level.mask_img.shape

        # take as reference the first values we found
        if ref_affine is None:
            ref_affine = affine
        if ref_shape is None:
            ref_shape = shape

        # NOTE(review): if a later model has no mask (affine is None)
        # while ref_affine is set, ``affine - ref_affine`` would raise a
        # TypeError rather than this ValueError — confirm all models are
        # expected to expose a mask once the first one does.
        if ref_affine is not None and abs(affine - ref_affine).max() > INF:
            raise ValueError(
                "All first level models must have the same affine.\n"
                f"Model {first_level.subject_label} "
                f"at index {model_idx} has a different affine "
                "from the previous ones."
            )

        if shape != ref_shape:
            raise ValueError(
                "All first level models must have the same shape.\n"
                f"Model {first_level.subject_label} "
                f"at index {model_idx} has a different shape "
                "from the previous ones."
            )

206 

207 

def _check_input_as_dataframe(second_level_input):
    """Validate a DataFrame passed as second_level_input."""
    required_columns = ("subject_label", "map_name", "effects_map_path")
    if any(
        column not in second_level_input.columns
        for column in required_columns
    ):
        raise ValueError(
            "'second_level_input' DataFrame must have"
            " columns 'subject_label', 'map_name' and"
            " 'effects_map_path'."
        )
    subject_labels = second_level_input["subject_label"].tolist()
    if not all(isinstance(label, str) for label in subject_labels):
        raise ValueError("'subject_label' column must contain only strings.")

221 

222 

def _check_input_as_nifti_images(second_level_input, none_design_matrix):
    """Validate niimg(s) passed as second_level_input."""
    # Normalize a single image to a one-element list for validation.
    niimgs = (
        [second_level_input]
        if isinstance(second_level_input, NiimgLike)
        else second_level_input
    )
    for niimg in niimgs:
        check_niimg(niimg=niimg, atleast_4d=True)
    if none_design_matrix:
        raise ValueError(
            "List of niimgs as second_level_input"
            " require a design matrix to be provided."
        )

233 

234 

def _check_input_as_surface_images(second_level_input, none_design_matrix):
    """Validate SurfaceImage input(s) for a second-level analysis."""
    if isinstance(second_level_input, SurfaceImage):
        shape = second_level_input.shape
        # A single image must carry several samples (one per subject).
        if len(shape) == 1 or shape[1] == 1:
            raise TypeError(
                "If a single SurfaceImage object is passed "
                "as second_level_input,"
                "it must be a 3D SurfaceImage."
            )

    if isinstance(second_level_input, list):
        # All images must live on the same mesh as the first one.
        reference_mesh = second_level_input[0].mesh
        for img in second_level_input[1:]:
            check_polymesh_equal(reference_mesh, img.mesh)
        if none_design_matrix:
            raise ValueError(
                "List of SurfaceImage objects as second_level_input"
                " require a design matrix to be provided."
            )

253 

254 

def _check_confounds(confounds):
    """Check confounds type."""
    if confounds is None:
        return
    if not isinstance(confounds, pd.DataFrame):
        raise ValueError("confounds must be a pandas DataFrame")
    if "subject_label" not in confounds.columns:
        raise ValueError(
            "confounds DataFrame must contain column 'subject_label'"
        )
    if len(confounds.columns) < 2:
        raise ValueError(
            "confounds should contain at least 2 columns"
            ' one called "subject_label" and the other'
            " with a given confound"
        )
    # subject_label entries must be strings to allow matching with models.
    if any(
        not isinstance(label, str)
        for label in confounds["subject_label"].tolist()
    ):
        raise ValueError("subject_label column must contain only strings")

275 

276 

def _check_first_level_contrast(second_level_input, first_level_contrast):
    """Require a first-level contrast when input is FirstLevelModels."""
    # Short-circuit: only inspect the first element for actual lists.
    input_is_flm_list = isinstance(
        second_level_input, list
    ) and isinstance(second_level_input[0], FirstLevelModel)
    if input_is_flm_list and first_level_contrast is None:
        raise ValueError(
            "If second_level_input was a list of FirstLevelModel,"
            " then first_level_contrast is mandatory. "
            "It corresponds to the second_level_contrast argument "
            "of the compute_contrast method of FirstLevelModel."
        )

289 

290 

def _check_output_type(output_type, valid_types):
    """Raise if ``output_type`` is not among the accepted values."""
    if output_type in valid_types:
        return
    raise ValueError(f"output_type must be one of {valid_types}")

294 

295 

def _check_design_matrix(design_matrix):
    """Check design_matrix type."""
    if design_matrix is None:
        return
    if not isinstance(design_matrix, (str, Path, pd.DataFrame)):
        raise TypeError(
            "'design_matrix' must be a "
            "str, pathlib.Path or a pandas.DataFrame.\n"
            f"Got {type(design_matrix)}"
        )

306 

307 

def _check_n_rows_desmat_vs_n_effect_maps(effect_maps, design_matrix):
    """Check design matrix and effect maps agree on number of rows."""
    n_maps = len(effect_maps)
    n_rows = design_matrix.shape[0]
    if n_maps != n_rows:
        raise ValueError(
            "design_matrix does not match the number of maps considered. "
            f"{n_rows} rows in design matrix do not match "
            f"with {n_maps} maps."
        )

316 

317 

def _get_con_val(second_level_contrast, design_matrix):
    """Validate the requested contrast and return it as a vector."""
    if second_level_contrast is None:
        # A single-column design has only one sensible contrast.
        if design_matrix.shape[1] != 1:
            raise ValueError("No second-level contrast is specified.")
        second_level_contrast = np.ones([1])

    if isinstance(second_level_contrast, str):
        # Translate a formula-like expression using the design columns.
        return expression_to_contrast_vector(
            second_level_contrast, design_matrix.columns.tolist()
        )

    con_val = np.array(second_level_contrast)
    if np.all(con_val == 0) or len(con_val) == 0:
        raise ValueError(
            "Contrast is null. Second_level_contrast must be a valid "
            "contrast vector, a list/array of 0s and 1s, a string, or a "
            "string expression."
        )
    return con_val

341 

342 

def _infer_effect_maps(second_level_input, contrast_def):
    """Deal with the different possibilities of second_level_input."""
    # Surface inputs are returned as-is (no niimg validation applies).
    if isinstance(second_level_input, SurfaceImage):
        return list(iter_img(second_level_input))
    if isinstance(second_level_input, list) and isinstance(
        second_level_input[0], SurfaceImage
    ):
        return second_level_input

    if isinstance(second_level_input, pd.DataFrame):
        # contrast_def is expected to match entries of the map_name column
        matches_contrast = second_level_input.apply(
            lambda row: row["map_name"] == contrast_def, axis=1
        )
        effect_maps = second_level_input.loc[
            matches_contrast, "effects_map_path"
        ].tolist()
    elif isinstance(second_level_input, list) and isinstance(
        second_level_input[0], FirstLevelModel
    ):
        # Get the first level model maps
        effect_maps = [
            model.compute_contrast(contrast_def, output_type="effect_size")
            for model in second_level_input
        ]
    else:
        effect_maps = second_level_input

    # check niimgs
    for niimg in effect_maps:
        check_niimg(niimg, ensure_ndim=3)

    return effect_maps

378 

379 

def _process_second_level_input(second_level_input):
    """Process second_level_input.

    Returns a sample map and, when available, the subject labels.
    """
    if isinstance(second_level_input, pd.DataFrame):
        return _process_second_level_input_as_dataframe(second_level_input)

    is_iterable = hasattr(second_level_input, "__iter__")
    if is_iterable and isinstance(second_level_input[0], FirstLevelModel):
        return _process_second_level_input_as_firstlevelmodels(
            second_level_input
        )
    if isinstance(second_level_input, SurfaceImage) or (
        is_iterable and isinstance(second_level_input[0], SurfaceImage)
    ):
        return _process_second_level_input_as_surface_image(
            second_level_input
        )
    # Remaining case: volumetric images; average them for the masker.
    return mean_img(second_level_input, copy_header=True), None

397 

398 

def _process_second_level_input_as_dataframe(second_level_input):
    """Process second_level_input provided as a pandas DataFrame."""
    first_map = second_level_input["effects_map_path"][0]
    subject_labels = second_level_input["subject_label"].to_list()
    return first_map, subject_labels

405 

406 

def _sort_input_dataframe(second_level_input):
    """Sort the dataframe rows by subject_label so their order matches \
    the design matrix rows when maps are extracted automatically.
    """
    column_names = second_level_input.columns.tolist()
    label_position = column_names.index("subject_label")
    # Stable sort on the raw row values, keyed by the label column.
    sorted_rows = sorted(
        second_level_input.values,
        key=lambda row: row[label_position],
    )
    return pd.DataFrame(sorted_rows, columns=column_names)

418 

419 

def _process_second_level_input_as_firstlevelmodels(second_level_input):
    """Process second_level_input provided \
    as a list of FirstLevelModel objects.
    """
    reference_model = second_level_input[0]
    # Any condition of the first model works here: the resulting map is
    # only used as a sample to fit the masker.
    first_condition = reference_model.design_matrices_[0].columns[0]
    sample_map = reference_model.compute_contrast(
        first_condition, output_type="effect_size"
    )
    subject_labels = [model.subject_label for model in second_level_input]
    return sample_map, subject_labels

431 

432 

def _process_second_level_input_as_surface_image(second_level_input):
    """Compute mean image across sample maps.

    All should have the same underlying meshes.

    Returns
    -------
    sample_map: SurfaceImage with 3 dimensions

    None
    """
    if isinstance(second_level_input, SurfaceImage):
        return second_level_input, None

    # One mean map per subject, concatenated along the sample axis.
    subject_means = [mean_img(img) for img in second_level_input]
    return concat_imgs(subject_means), None

450 

451 

@fill_doc
class SecondLevelModel(BaseGLM):
    """Implement the :term:`General Linear Model<GLM>` for multiple \
    subject :term:`fMRI` data.

    Parameters
    ----------
    %(second_level_mask_img)s

    %(target_affine)s

        .. note::
            This parameter is passed to :func:`nilearn.image.resample_img`.

        .. note::
            This parameter is ignored when fitting surface images.

    %(target_shape)s

        .. note::
            This parameter is passed to :func:`nilearn.image.resample_img`.

        .. note::
            This parameter is ignored when fitting surface images.

    %(smoothing_fwhm)s

        .. note::
            This parameter is ignored when fitting surface images.

    %(memory)s

    %(memory_level1)s

    %(verbose0)s
        If 0 prints nothing. If 1 prints final computation time.
        If 2 prints masker computation details.

    %(n_jobs)s

    minimize_memory : :obj:`bool`, default=True
        Gets rid of some variables on the model fit results that are not
        necessary for contrast computation and would only be useful for
        further inspection of model details. This has an important impact
        on memory consumption.
    """

    def __str__(self):
        return "Second Level Model"

    def __init__(
        self,
        mask_img=None,
        target_affine=None,
        target_shape=None,
        smoothing_fwhm=None,
        memory=None,
        memory_level=1,
        verbose=0,
        n_jobs=1,
        minimize_memory=True,
    ):
        # sklearn convention: __init__ only stores parameters, unchanged.
        self.mask_img = mask_img
        self.target_affine = target_affine
        self.target_shape = target_shape
        self.smoothing_fwhm = smoothing_fwhm
        self.memory = memory
        self.memory_level = memory_level
        self.verbose = verbose
        self.n_jobs = n_jobs
        self.minimize_memory = minimize_memory

    @fill_doc
    def fit(self, second_level_input, confounds=None, design_matrix=None):
        """Fit the second-level :term:`GLM`.

        1. create design matrix
        2. do a masker job: fMRI_data -> Y
        3. fit regression to (Y, X)

        Parameters
        ----------
        %(second_level_input)s

        %(second_level_confounds)s

        %(second_level_design_matrix)s

        """
        check_params(self.__dict__)
        # Reset fit attributes so a partly failed fit cannot be mistaken
        # for a fitted model (see __sklearn_is_fitted__).
        self.second_level_input_ = None
        self.confounds_ = None
        self.labels_ = None
        self.results_ = None

        self.memory = check_memory(self.memory)

        # check second_level_input
        _check_second_level_input(
            second_level_input, design_matrix, confounds=confounds
        )

        # check confounds
        _check_confounds(confounds)

        if isinstance(second_level_input, pd.DataFrame):
            # Sort rows by subject_label to match the design matrix order.
            second_level_input = _sort_input_dataframe(second_level_input)
        if isinstance(second_level_input, Nifti1Image):
            # A single 4D image is split into a list of 3D images.
            check_niimg(second_level_input, ensure_ndim=4)
            second_level_input = four_to_three(second_level_input)
        self.second_level_input_ = second_level_input

        self.confounds_ = confounds

        sample_map, subjects_label = _process_second_level_input(
            second_level_input
        )

        # Report progress
        t0 = time.time()
        logger.log(
            "Fitting second level model. Take a deep breath.\r",
            verbose=self.verbose,
        )

        # Create and set design matrix, if not given
        if design_matrix is None:
            design_matrix = make_second_level_design_matrix(
                subjects_label, confounds
            )
        elif isinstance(design_matrix, (str, Path, pd.DataFrame)):
            design_matrix = check_and_load_tables(
                design_matrix, "design_matrix"
            )[0]
        self.design_matrix_ = design_matrix

        # Choose the masker type from the input data and mask.
        masker_type = "nii"
        if not self._is_volume_glm() or isinstance(sample_map, SurfaceImage):
            masker_type = "surface"

        if masker_type == "surface" and self.smoothing_fwhm is not None:
            warn(
                "Parameter 'smoothing_fwhm' is not "
                "yet supported for surface data.",
                UserWarning,
                stacklevel=find_stack_level(),
            )
            self.smoothing_fwhm = None

        check_compatibility_mask_and_images(self.mask_img, sample_map)
        self.masker_ = check_embedded_masker(self, masker_type)

        self.masker_.fit(sample_map)

        # Report progress
        logger.log(
            "\nComputation of second level model done in "
            f"{time.time() - t0:0.2f} seconds.\n",
            verbose=self.verbose,
        )

        self._reporting_data = {}

        return self

    def __sklearn_is_fitted__(self):
        # Fitted iff fit() ran to the point of storing its input.
        return (
            hasattr(self, "second_level_input_")
            and self.second_level_input_ is not None
        )

    @fill_doc
    def compute_contrast(
        self,
        second_level_contrast=None,
        first_level_contrast=None,
        second_level_stat_type=None,
        output_type="z_score",
    ):
        """Generate different outputs corresponding to \
        the contrasts provided e.g. z_map, t_map, effects and variance.

        Parameters
        ----------
        %(second_level_contrast)s

        %(first_level_contrast)s

        second_level_stat_type : {'t', 'F'} or None, default=None
            Type of the second level contrast.

        output_type : {'z_score', 'stat', 'p_value', \
        :term:`'effect_size'<Parameter Estimate>`, \
        'effect_variance', 'all'}, default='z_score'
            Type of the output map.

        Returns
        -------
        output_image : :class:`~nibabel.nifti1.Nifti1Image`
            The desired output image(s).
            If ``output_type == 'all'``,
            then the output is a dictionary of images,
            keyed by the type of image.

        """
        check_is_fitted(self)

        # check first_level_contrast
        _check_first_level_contrast(
            self.second_level_input_, first_level_contrast
        )

        # check contrast and obtain con_val
        con_val = _get_con_val(second_level_contrast, self.design_matrix_)

        # check output type
        # 'all' is assumed to be the final entry;
        # if adding more, place before 'all'
        valid_types = [
            "z_score",
            "stat",
            "p_value",
            "effect_size",
            "effect_variance",
            "all",
        ]
        _check_output_type(output_type, valid_types)

        # Get effect_maps appropriate for chosen contrast
        effect_maps = _infer_effect_maps(
            self.second_level_input_, first_level_contrast
        )

        _check_n_rows_desmat_vs_n_effect_maps(effect_maps, self.design_matrix_)

        # Fit an Ordinary Least Squares regression for parametric statistics
        Y = self.masker_.transform(effect_maps)
        if self.memory:
            # Do not let n_jobs invalidate the cache key.
            mem_glm = self.memory.cache(run_glm, ignore=["n_jobs"])
        else:
            mem_glm = run_glm
        labels, results = mem_glm(
            Y,
            self.design_matrix_.values,
            n_jobs=self.n_jobs,
            noise_model="ols",
        )

        # We save memory if inspecting model details is not necessary
        if self.minimize_memory:
            for key in results:
                results[key] = SimpleRegressionResults(results[key])
        self.labels_ = labels
        self.results_ = results

        # We compute contrast object
        if self.memory:
            mem_contrast = self.memory.cache(compute_contrast)
        else:
            mem_contrast = compute_contrast
        contrast = mem_contrast(
            self.labels_, self.results_, con_val, second_level_stat_type
        )

        output_types = (
            valid_types[:-1] if output_type == "all" else [output_type]
        )

        outputs = {}
        for output_type_ in output_types:
            # We get desired output from contrast object
            estimate_ = getattr(contrast, output_type_)()
            # Prepare the returned images
            output = self.masker_.inverse_transform(estimate_)
            contrast_name = str(con_val)
            if not isinstance(output, SurfaceImage):
                # Surface images have no NIfTI header to annotate.
                output.header["descrip"] = (
                    f"{output_type} of contrast {contrast_name}"
                )
            outputs[output_type_] = output

        return outputs if output_type == "all" else output

    def _get_element_wise_model_attribute(
        self, attribute, result_as_time_series
    ):
        """Transform RegressionResults instances within a dictionary \
        (whose keys represent the autoregressive coefficient under the 'ar1' \
        noise model or only 0.0 under 'ols' noise_model and values are the \
        RegressionResults instances) into input nifti space.

        Parameters
        ----------
        attribute : :obj:`str`
            an attribute of a RegressionResults instance.
            possible values include: 'residuals', 'normalized_residuals',
            'predicted', SSE, r_square, MSE.

        result_as_time_series : :obj:`bool`
            whether the RegressionResult attribute has a value
            per timepoint of the input nifti image.

        Returns
        -------
        output : :obj:`list`
            A list of Nifti1Image(s).

        """
        check_is_fitted(self)
        # check if valid attribute is being accessed.
        all_attributes = dict(vars(RegressionResults)).keys()
        possible_attributes = [
            prop for prop in all_attributes if "__" not in prop
        ]
        if attribute not in possible_attributes:
            msg = f"attribute must be one of: {possible_attributes}"
            raise ValueError(msg)

        # Element-wise attributes are discarded when minimize_memory=True.
        if self.minimize_memory:
            raise ValueError(
                "To access voxelwise attributes like "
                "R-squared, residuals, and predictions, "
                "the `SecondLevelModel`-object needs to store "
                "there attributes. "
                "To do so, set `minimize_memory` to `False` "
                "when initializing the `SecondLevelModel`-object."
            )

        # labels_/results_ are only populated by compute_contrast().
        if (
            not hasattr(self, "labels_")
            or not hasattr(self, "results_")
            or self.labels_ is None
            or self.results_ is None
        ):
            raise ValueError(
                "The model has no results. No contrast has been computed yet."
            )

        if result_as_time_series:
            voxelwise_attribute = np.zeros(
                (self.design_matrix_.shape[0], len(self.labels_))
            )
        else:
            voxelwise_attribute = np.zeros((1, len(self.labels_)))

        # Scatter each label's attribute values back to its voxels.
        for label_ in self.results_:
            label_mask = self.labels_ == label_
            voxelwise_attribute[:, label_mask] = getattr(
                self.results_[label_], attribute
            )
        return self.masker_.inverse_transform(voxelwise_attribute)

    def generate_report(
        self,
        contrasts=None,
        first_level_contrast=None,
        title=None,
        bg_img="MNI152TEMPLATE",
        threshold=3.09,
        alpha=0.001,
        cluster_threshold=0,
        height_control="fpr",
        two_sided=False,
        min_distance=8.0,
        plot_type="slice",
        cut_coords=None,
        display_mode=None,
        report_dims=(1600, 800),
    ):
        """Return a :class:`~nilearn.reporting.HTMLReport` \
        which shows all important aspects of a fitted :term:`GLM`.

        The :class:`~nilearn.reporting.HTMLReport` can be opened in a
        browser, displayed in a notebook, or saved to disk as a standalone
        HTML file.

        The :term:`GLM` must be fitted and have the computed design
        matrix(ces).

        .. note::

            Refer to the documentation of
            :func:`~nilearn.reporting.make_glm_report`
            for details about the parameters

        Returns
        -------
        report_text : :class:`~nilearn.reporting.HTMLReport`
            Contains the HTML code for the :term:`GLM` report.

        """
        # Imported here to avoid a circular import at module load time.
        from nilearn.reporting.glm_reporter import make_glm_report

        # Reporting data is normally set by fit(); fall back to a minimal
        # dictionary so a report can still be generated.
        if not hasattr(self, "_reporting_data"):
            self._reporting_data = {
                "trial_types": [],
                "noise_model": getattr(self, "noise_model", None),
                "hrf_model": getattr(self, "hrf_model", None),
                "drift_model": None,
            }

        return make_glm_report(
            self,
            contrasts,
            first_level_contrast=first_level_contrast,
            title=title,
            bg_img=bg_img,
            threshold=threshold,
            alpha=alpha,
            cluster_threshold=cluster_threshold,
            height_control=height_control,
            two_sided=two_sided,
            min_distance=min_distance,
            plot_type=plot_type,
            cut_coords=cut_coords,
            display_mode=display_mode,
            report_dims=report_dims,
        )

870 

871 

872@fill_doc 

873def non_parametric_inference( 

874 second_level_input, 

875 confounds=None, 

876 design_matrix=None, 

877 second_level_contrast=None, 

878 first_level_contrast=None, 

879 mask=None, 

880 smoothing_fwhm=None, 

881 model_intercept=True, 

882 n_perm=10000, 

883 two_sided_test=False, 

884 random_state=None, 

885 n_jobs=1, 

886 verbose=0, 

887 threshold=None, 

888 tfce=False, 

889): 

890 """Generate p-values corresponding to the contrasts provided \ 

891 based on permutation testing. 

892 

893 This function is a light wrapper around 

894 :func:`~nilearn.mass_univariate.permuted_ols`, with additional steps to 

895 ensure compatibility with the :mod:`~nilearn.glm.second_level` module. 

896 

897 Parameters 

898 ---------- 

899 %(second_level_input)s 

900 

901 %(second_level_confounds)s 

902 

903 %(second_level_design_matrix)s 

904 

905 %(second_level_contrast)s 

906 

907 %(first_level_contrast)s 

908 

909 .. versionadded:: 0.9.0 

910 

911 %(second_level_mask)s 

912 

913 %(smoothing_fwhm)s 

914 

915 .. warning:: 

916 

917 Smoothing is not implemented for surface data. 

918 

919 model_intercept : :obj:`bool`, default=True 

920 If ``True``, a constant column is added to the confounding variates 

921 unless the tested variate is already the intercept. 

922 

923 %(n_perm)s 

924 

925 %(two_sided_test)s 

926 

927 %(random_state)s 

928 Use this parameter to have the same permutations in each 

929 computing units. 

930 

931 %(n_jobs)s 

932 

933 %(verbose0)s 

934 

935 threshold : None or :obj:`float`, default=None 

936 Cluster-forming threshold in p-scale. 

937 This is only used for cluster-level inference. 

938 If None, no cluster-level inference will be performed. 

939 

940 .. versionadded:: 0.9.2 

941 

942 .. warning:: 

943 

944 Performing cluster-level inference will increase the computation 

945 time of the permutation procedure. 

946 

947 .. warning:: 

948 

949 Cluster analysis are not implemented for surface data. 

950 

951 %(tfce)s 

952 

953 .. versionadded:: 0.9.2 

954 

955 .. warning:: 

956 

957 TFCE analysis are not implemented for surface data. 

958 

959 Returns 

960 ------- 

961 neg_log10_vfwe_pvals_img : :class:`~nibabel.nifti1.Nifti1Image` 

962 The image which contains negative logarithm of the 

963 voxel-level FWER-corrected p-values. 

964 

965 .. note:: 

966 This is returned if ``threshold`` is None (the default). 

967 

968 outputs : :obj:`dict` 

969 Output images, organized in a dictionary. 

970 Each image is 3D/4D, with the potential fourth dimension corresponding 

971 to the regressors. 

972 

973 .. note:: 

974 This is returned if ``tfce`` is True or ``threshold`` is not None. 

975 

976 .. versionadded:: 0.9.2 

977 

978 Here are the keys: 

979 

980 =============== ======================================================= 

981 key description 

982 =============== ======================================================= 

983 t T-statistics associated with the significance test of 

984 the n_regressors explanatory variates against the 

985 n_descriptors target variates. 

986 logp_max_t Negative log10 family-wise error rate-corrected 

987 p-values corrected based on the distribution of maximum 

988 t-statistics from permutations. 

989 size Cluster size values associated with the significance 

990 test of the n_regressors explanatory variates against 

991 the n_descriptors target variates. 

992 

993 Returned only if ``threshold`` is not ``None``. 

994 logp_max_size Negative log10 family-wise error rate-corrected 

995 p-values corrected based on the distribution of maximum 

996 cluster sizes from permutations. 

997 This map is generated through cluster-level methods, so 

998 the values in the map describe the significance of 

999 clusters, rather than individual voxels. 

1000 

1001 Returned only if ``threshold`` is not ``None``. 

1002 mass Cluster mass values associated with the significance 

1003 test of the n_regressors explanatory variates against 

1004 the n_descriptors target variates. 

1005 

1006 Returned only if ``threshold`` is not ``None``. 

1007 logp_max_mass Negative log10 family-wise error rate-corrected 

1008 p-values corrected based on the distribution of maximum 

1009 cluster masses from permutations. 

1010 This map is generated through cluster-level methods, so 

1011 the values in the map describe the significance of 

1012 clusters, rather than individual voxels. 

1013 

1014 Returned only if ``threshold`` is not ``None``. 

1015 tfce :term:`TFCE` values associated 

1016 with the significance test of 

1017 the n_regressors explanatory variates against the 

1018 n_descriptors target variates. 

1019 

1020 Returned only if ``tfce`` is ``True``. 

1021 logp_max_tfce Negative log10 family-wise error rate-corrected 

1022 p-values corrected based on the distribution of maximum 

1023 TFCE values from permutations. 

1024 

1025 Returned only if ``tfce`` is ``True``. 

1026 =============== ======================================================= 

1027 

1028 See Also 

1029 -------- 

1030 :func:`~nilearn.mass_univariate.permuted_ols` : For more information on \ 

1031 the permutation procedure. 

1032 

1033 References 

1034 ---------- 

1035 .. footbibliography:: 

1036 """ 

1037 check_params(locals()) 

1038 _check_second_level_input(second_level_input, design_matrix) 

1039 _check_confounds(confounds) 

1040 design_matrix = check_and_load_tables(design_matrix, "design_matrix")[0] 

1041 

1042 if isinstance(second_level_input, pd.DataFrame): 

1043 second_level_input = _sort_input_dataframe(second_level_input) 

1044 sample_map, _ = _process_second_level_input(second_level_input) 

1045 

1046 if isinstance(sample_map, SurfaceImage) and smoothing_fwhm is not None: 

1047 warn( 

1048 "Parameter 'smoothing_fwhm' is not " 

1049 "yet supported for surface data.", 

1050 UserWarning, 

1051 stacklevel=find_stack_level(), 

1052 ) 

1053 smoothing_fwhm = None 

1054 

1055 if (isinstance(sample_map, SurfaceImage)) and (tfce or threshold): 

1056 tfce = False 

1057 threshold = None 

1058 warn( 

1059 ( 

1060 "Cluster level inference not yet implemented " 

1061 "for surface data.\n" 

1062 f"Setting {tfce=} and {threshold=}." 

1063 ), 

1064 UserWarning, 

1065 stacklevel=find_stack_level(), 

1066 ) 

1067 

1068 # Report progress 

1069 t0 = time.time() 

1070 logger.log("Fitting second level model...", verbose=verbose) 

1071 

1072 # Learn the mask. Assume the first level imgs have been masked. 

1073 if isinstance(mask, (NiftiMasker, SurfaceMasker)): 

1074 masker = clone(mask) 

1075 if smoothing_fwhm is not None and masker.smoothing_fwhm is not None: 

1076 warn( 

1077 "Parameter 'smoothing_fwhm' of the masker overridden.", 

1078 stacklevel=find_stack_level(), 

1079 ) 

1080 masker.smoothing_fwhm = smoothing_fwhm 

1081 

1082 elif isinstance(sample_map, SurfaceImage): 

1083 masker = SurfaceMasker( 

1084 mask_img=mask, 

1085 smoothing_fwhm=smoothing_fwhm, 

1086 memory=Memory(None), 

1087 verbose=max(0, verbose - 1), 

1088 memory_level=1, 

1089 ) 

1090 else: 

1091 masker = NiftiMasker( 

1092 mask_img=mask, 

1093 smoothing_fwhm=smoothing_fwhm, 

1094 memory=Memory(None), 

1095 verbose=max(0, verbose - 1), 

1096 memory_level=1, 

1097 ) 

1098 

1099 masker.fit(sample_map) 

1100 

1101 # Report progress 

1102 logger.log( 

1103 "\nComputation of second level model done in " 

1104 f"{time.time() - t0} seconds\n", 

1105 verbose=verbose, 

1106 ) 

1107 

1108 # Check and obtain the contrast 

1109 contrast = _get_con_val(second_level_contrast, design_matrix) 

1110 # Get first-level effect_maps 

1111 effect_maps = _infer_effect_maps(second_level_input, first_level_contrast) 

1112 

1113 _check_n_rows_desmat_vs_n_effect_maps(effect_maps, design_matrix) 

1114 

1115 # Obtain design matrix vars 

1116 var_names = design_matrix.columns.tolist() 

1117 

1118 # Obtain tested_var 

1119 column_mask = [bool(val) for val in contrast] 

1120 tested_var = np.dot(design_matrix, contrast) 

1121 

1122 # Remove tested var from remaining var names 

1123 var_names = [var for var, mask in zip(var_names, column_mask) if not mask] 

1124 

1125 # Obtain confounding vars 

1126 # No other vars in design matrix by default 

1127 confounding_vars = None 

1128 if var_names: 

1129 # Use remaining vars as confounding vars 

1130 confounding_vars = np.asarray(design_matrix[var_names]) 

1131 

1132 # Mask data 

1133 target_vars = masker.transform(effect_maps) 

1134 

1135 # Perform massively univariate analysis with permuted OLS 

1136 outputs = permuted_ols( 

1137 tested_var, 

1138 target_vars, 

1139 confounding_vars=confounding_vars, 

1140 model_intercept=model_intercept, 

1141 n_perm=n_perm, 

1142 two_sided_test=two_sided_test, 

1143 random_state=random_state, 

1144 n_jobs=n_jobs, 

1145 verbose=max(0, verbose - 1), 

1146 masker=masker, 

1147 threshold=threshold, 

1148 tfce=tfce, 

1149 output_type="dict", 

1150 ) 

1151 neg_log10_vfwe_pvals_img = masker.inverse_transform( 

1152 np.ravel(outputs["logp_max_t"]) 

1153 ) 

1154 

1155 if (not tfce) and (threshold is None): 

1156 return neg_log10_vfwe_pvals_img 

1157 

1158 t_img = masker.inverse_transform(np.ravel(outputs["t"])) 

1159 

1160 out = { 

1161 "t": t_img, 

1162 "logp_max_t": neg_log10_vfwe_pvals_img, 

1163 } 

1164 

1165 if tfce: 

1166 neg_log10_tfce_pvals_img = masker.inverse_transform( 

1167 np.ravel(outputs["logp_max_tfce"]), 

1168 ) 

1169 out["tfce"] = masker.inverse_transform(np.ravel(outputs["tfce"])) 

1170 out["logp_max_tfce"] = neg_log10_tfce_pvals_img 

1171 

1172 if threshold is not None: 

1173 # Cluster size-based p-values 

1174 neg_log10_csfwe_pvals_img = masker.inverse_transform( 

1175 np.ravel(outputs["logp_max_size"]), 

1176 ) 

1177 

1178 # Cluster mass-based p-values 

1179 neg_log10_cmfwe_pvals_img = masker.inverse_transform( 

1180 np.ravel(outputs["logp_max_mass"]), 

1181 ) 

1182 

1183 out["size"] = masker.inverse_transform(np.ravel(outputs["size"])) 

1184 out["logp_max_size"] = neg_log10_csfwe_pvals_img 

1185 out["mass"] = masker.inverse_transform(np.ravel(outputs["mass"])) 

1186 out["logp_max_mass"] = neg_log10_cmfwe_pvals_img 

1187 

1188 return out