# nilearn/_utils/estimator_checks.py

1"""Checks for nilearn estimators. 

2 

3Most of those estimators have pytest dependencies 

4and importing them will fail if pytest is not installed. 

5""" 

6 

7import inspect 

8import sys 

9import warnings 

10from copy import deepcopy 

11from pathlib import Path 

12from tempfile import TemporaryDirectory 

13 

14import joblib 

15import numpy as np 

16import pandas as pd 

17import pytest 

18from nibabel import Nifti1Image 

19from numpy.testing import ( 

20 assert_array_almost_equal, 

21 assert_array_equal, 

22 assert_raises, 

23) 

24from packaging.version import parse 

25from sklearn import __version__ as sklearn_version 

26from sklearn import clone 

27from sklearn.base import BaseEstimator, is_classifier, is_regressor 

28from sklearn.datasets import make_classification, make_regression 

29from sklearn.preprocessing import StandardScaler 

30from sklearn.utils._testing import ( 

31 set_random_state, 

32) 

33from sklearn.utils.estimator_checks import ( 

34 _is_public_parameter, 

35 check_is_fitted, 

36) 

37from sklearn.utils.estimator_checks import ( 

38 check_estimator as sklearn_check_estimator, 

39) 

40 

41from nilearn._utils.exceptions import DimensionError, MeshDimensionError 

42from nilearn._utils.helpers import is_matplotlib_installed 

43from nilearn._utils.niimg_conversions import check_imgs_equal 

44from nilearn._utils.tags import SKLEARN_LT_1_6 

45from nilearn._utils.testing import write_imgs_to_path 

46from nilearn.conftest import ( 

47 _affine_eye, 

48 _affine_mni, 

49 _drop_surf_img_part, 

50 _flip_surf_img, 

51 _img_3d_mni, 

52 _img_3d_ones, 

53 _img_3d_rand, 

54 _img_3d_zeros, 

55 _img_4d_rand_eye, 

56 _img_4d_rand_eye_medium, 

57 _img_mask_mni, 

58 _make_mesh, 

59 _make_surface_img, 

60 _make_surface_img_and_design, 

61 _make_surface_mask, 

62 _rng, 

63 _shape_3d_default, 

64 _shape_3d_large, 

65 _surf_mask_1d, 

66) 

67from nilearn.connectome import GroupSparseCovariance, GroupSparseCovarianceCV 

68from nilearn.connectome.connectivity_matrices import ConnectivityMeasure 

69from nilearn.decoding.decoder import _BaseDecoder 

70from nilearn.decoding.searchlight import SearchLight 

71from nilearn.decoding.tests.test_same_api import to_niimgs 

72from nilearn.decomposition._base import _BaseDecomposition 

73from nilearn.maskers import ( 

74 MultiNiftiMapsMasker, 

75 NiftiLabelsMasker, 

76 NiftiMapsMasker, 

77 NiftiMasker, 

78 NiftiSpheresMasker, 

79 SurfaceMapsMasker, 

80 SurfaceMasker, 

81) 

82from nilearn.masking import load_mask_img 

83from nilearn.regions import RegionExtractor 

84from nilearn.regions.hierarchical_kmeans_clustering import HierarchicalKMeans 

85from nilearn.regions.rena_clustering import ReNA 

86from nilearn.reporting.tests.test_html_report import _check_html 

87from nilearn.surface import SurfaceImage 

88from nilearn.surface.surface import get_data as get_surface_data 

89from nilearn.surface.utils import ( 

90 assert_surface_image_equal, 

91) 

92 

93SKLEARN_MINOR = parse(sklearn_version).release[1] 

94 

95 

96def nilearn_dir() -> Path: 

97 return Path(__file__).parents[1] 

98 

99 

100def check_estimator(estimators: list[BaseEstimator], valid: bool = True): 

101 """Yield a valid or invalid scikit-learn estimators check. 

102 

103 ONLY USED FOR sklearn<1.6 

104 

105 As some Nilearn estimators do not comply

106 with sklearn recommendations 

107 (cannot fit Numpy arrays, do input validation in the constructor...) 

108 we cannot directly use 

109 sklearn.utils.estimator_checks.check_estimator. 

110 

111 So this is a home-made generator that yields an estimator instance

112 along with a 

113 - valid check from sklearn: those should stay valid 

114 - or an invalid check that is known to fail. 

115 

116 See this section rolling-your-own-estimator in 

117 the scikit-learn doc for more info: 

118 https://scikit-learn.org/stable/developers/develop.html 

119 

120 Parameters 

121 ---------- 

122 estimators : list of estimator object 

123 Estimator instances to check.

124 

125 valid : bool, default=True 

126 Whether to return only the valid checks or not. 

127 """ 

128 # TODO remove this function when dropping sklearn 1.5 

129 if not SKLEARN_LT_1_6: # pragma: no cover 

130 raise RuntimeError( 

131 "Use dedicated sklearn utilities to test estimators." 

132 ) 

133 

134 if not isinstance(estimators, list): # pragma: no cover 

135 raise TypeError( 

136 "'estimators' should be a list. " 

137 f"Got {estimators.__class__.__name__}." 

138 ) 

139 

140 for est in estimators: 

141 expected_failed_checks = return_expected_failed_checks(est) 

142 

143 for e, check in sklearn_check_estimator( 

144 estimator=est, generate_only=True 

145 ): 

146 if not valid and check.func.__name__ in expected_failed_checks: 

147 yield e, check, check.func.__name__ 

148 if valid and check.func.__name__ not in expected_failed_checks: 

149 yield e, check, check.func.__name__ 

150 

151 
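
# Illustrative usage (editor's sketch, not part of the module): with sklearn < 1.6,
# a test module would typically parametrize over the (estimator, check, name)
# triplets yielded above; the estimator list below is hypothetical.
#
#     @pytest.mark.parametrize(
#         "estimator, check, name",
#         check_estimator(estimators=[NiftiMasker()], valid=True),
#     )
#     def test_check_estimator_sklearn_valid(estimator, check, name):
#         """Run a sklearn check expected to pass on a nilearn estimator."""
#         check(estimator)
#
# The same parametrization with valid=False yields the checks listed in
# return_expected_failed_checks, typically wrapped in pytest.xfail.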

152# some checks would fail on sklearn 1.6.1 on older python 

153# see https://github.com/scikit-learn-contrib/imbalanced-learn/issues/1131 

154IS_SKLEARN_1_6_1_on_py_3_9 = ( 

155 SKLEARN_MINOR == 6 

156 and parse(sklearn_version).release[2] == 1 

157 and sys.version_info[1] < 10 

158) 

159 

160 

161def return_expected_failed_checks( 

162 estimator: BaseEstimator, 

163) -> dict[str, str]: 

164 """Return the expected failures for a given estimator. 

165 

166 This is where all the "expected_failed_checks" for all Nilearn estimators 

167 are centralized. 

168 

169 "expected_failed_checks" is first created to make sure that all checks 

170 with the oldest supported sklearn versions pass. 

171 

172 Afterwards, the function may tweak the "expected_failed_checks" depending

173 on the estimator and sklearn version. 

174 

175 Returns 

176 ------- 

177 expected_failed_checks : dict[str, str] 

178 A dictionary of the form:: 

179 

180 { 

181 "check_name": "this check is expected to fail because ...", 

182 } 

183 

184 Where `"check_name"` is the name of the check, and the value explains why

185 the check fails. 

186 """ 

187 expected_failed_checks: dict[str, str] = {} 

188 

189 if isinstance(estimator, ConnectivityMeasure):

190 expected_failed_checks = { 

191 "check_estimator_sparse_data" : "remove when dropping sklearn 1.4", 

192 "check_fit2d_predict1d": "not applicable", 

193 "check_estimator_sparse_array": "TODO", 

194 "check_estimator_sparse_matrix": "TODO", 

195 "check_methods_sample_order_invariance": "TODO", 

196 "check_methods_subset_invariance": "TODO", 

197 "check_n_features_in": "TODO", 

198 "check_n_features_in_after_fitting": "TODO", 

199 "check_readonly_memmap_input": "TODO", 

200 "check_transformer_data_not_an_array": "TODO", 

201 "check_transformer_general": "TODO", 

202 } 

203 if SKLEARN_MINOR > 4:

204 expected_failed_checks.pop("check_estimator_sparse_data") 

205 expected_failed_checks |= { 

206 "check_transformer_preserve_dtypes": "TODO", 

207 } 

208 

209 return expected_failed_checks 

210 

211 elif isinstance(estimator, HierarchicalKMeans): 

212 return expected_failed_checks_clustering() 

213 

214 elif isinstance(estimator, ReNA): 

215 expected_failed_checks = { 

216 "check_estimator_sparse_array": "remove when dropping sklearn 1.4", 

217 "check_estimator_sparse_matrix": ( 

218 "remove when dropping sklearn 1.4" 

219 ), 

220 "check_clustering": "TODO", 

221 "check_dict_unchanged": "TODO", 

222 "check_dtype_object": "TODO", 

223 "check_dont_overwrite_parameters": "TODO", 

224 "check_estimators_dtypes": "TODO", 

225 "check_estimators_fit_returns_self": "TODO", 

226 "check_estimators_nan_inf": "TODO", 

227 "check_estimators_overwrite_params": "TODO", 

228 "check_estimators_pickle": "TODO", 

229 "check_f_contiguous_array_estimator": "TODO", 

230 "check_fit_idempotent": "TODO", 

231 "check_fit_check_is_fitted": "TODO", 

232 "check_fit_score_takes_y": "TODO", 

233 "check_fit2d_predict1d": "TODO", 

234 "check_methods_sample_order_invariance": "TODO", 

235 "check_methods_subset_invariance": "TODO", 

236 "check_n_features_in": "TODO", 

237 "check_n_features_in_after_fitting": "TODO", 

238 "check_pipeline_consistency": "TODO", 

239 "check_positive_only_tag_during_fit": "TODO", 

240 "check_readonly_memmap_input": "TODO", 

241 "check_transformer_data_not_an_array": "TODO", 

242 "check_transformer_general": "TODO", 

243 "check_transformer_preserve_dtypes": "TODO", 

244 } 

245 

246 if SKLEARN_MINOR >= 5: 

247 expected_failed_checks.pop("check_estimator_sparse_matrix") 

248 expected_failed_checks.pop("check_estimator_sparse_array") 

249 

250 return expected_failed_checks 

251 

252 elif isinstance( 

253 estimator, (GroupSparseCovariance, GroupSparseCovarianceCV) 

254 ): 

255 return { 

256 "check_fit_score_takes_y": "not applicable", 

257 "check_fit_check_is_fitted": "handled by nilearn checks", 

258 "check_dict_unchanged": "TODO", 

259 "check_dont_overwrite_parameters": "TODO", 

260 "check_dtype_object": "TODO", 

261 "check_estimator_sparse_array": "TODO", 

262 "check_estimator_sparse_data": "TODO", 

263 "check_estimator_sparse_matrix": "TODO", 

264 "check_estimators_dtypes": "TODO", 

265 "check_estimators_empty_data_messages": "TODO", 

266 "check_estimators_fit_returns_self": "TODO", 

267 "check_estimators_nan_inf": "TODO", 

268 "check_estimators_overwrite_params": "TODO", 

269 "check_estimators_pickle": "TODO", 

270 "check_f_contiguous_array_estimator": "TODO", 

271 "check_fit_idempotent": "TODO", 

272 "check_fit2d_1feature": "TODO", 

273 "check_fit2d_1sample": "TODO", 

274 "check_fit2d_predict1d": "TODO", 

275 "check_methods_sample_order_invariance": "TODO", 

276 "check_methods_subset_invariance": "TODO", 

277 "check_n_features_in": "TODO", 

278 "check_n_features_in_after_fitting": "TODO", 

279 "check_pipeline_consistency": "TODO", 

280 "check_positive_only_tag_during_fit": "TODO", 

281 "check_readonly_memmap_input": "TODO", 

282 } 

283 

284 # below this point we should only deal with estimators 

285 # that accept images as input 

286 assert accept_niimg_input(estimator) or accept_surf_img_input(estimator) 

287 

288 if isinstance(estimator, (_BaseDecoder, SearchLight)): 

289 return expected_failed_checks_decoders(estimator) 

290 

291 # keeping track of some of those in 

292 # https://github.com/nilearn/nilearn/issues/4538 

293 expected_failed_checks = { 

294 # the following are skipped 

295 # because there is nilearn specific replacement 

296 "check_dict_unchanged": "replaced by check_masker_dict_unchanged", 

297 "check_dont_overwrite_parameters": ( 

298 "replaced by check_img_estimator_dont_overwrite_parameters" 

299 ), 

300 "check_estimators_dtypes": ("replaced by check_masker_dtypes"), 

301 "check_estimators_empty_data_messages": ( 

302 "replaced by check_masker_empty_data_messages " 

303 "for surface maskers and not implemented for nifti maskers " 

304 "for performance reasons." 

305 ), 

306 "check_estimators_fit_returns_self": ( 

307 "replaced by check_fit_returns_self" 

308 ), 

309 "check_fit_check_is_fitted": ("replaced by check_masker_fitted"), 

310 "check_fit_score_takes_y": ( 

311 "replaced by check_masker_fit_score_takes_y" 

312 ), 

313 # Those are skipped for now because they fail

314 # for unknown reasons 

315 # most often because sklearn inputs expect a numpy array 

316 # that errors with maskers, 

317 # or because a suitable nilearn replacement 

318 # has not yet been created. 

319 "check_estimators_pickle": "TODO", 

320 "check_estimators_nan_inf": "TODO", 

321 "check_estimators_overwrite_params": "TODO", 

322 "check_fit_idempotent": "TODO", 

323 "check_methods_sample_order_invariance": "TODO", 

324 "check_methods_subset_invariance": "TODO", 

325 "check_positive_only_tag_during_fit": "TODO", 

326 "check_pipeline_consistency": "TODO", 

327 "check_readonly_memmap_input": "TODO", 

328 } 

329 

330 expected_failed_checks |= unapplicable_checks() 

331 

332 if hasattr(estimator, "transform"): 

333 expected_failed_checks |= { 

334 "check_transformer_data_not_an_array": ( 

335 "replaced by check_masker_transformer" 

336 ), 

337 "check_transformer_general": ( 

338 "replaced by check_masker_transformer" 

339 ), 

340 "check_transformer_preserve_dtypes": ( 

341 "replaced by check_masker_transformer" 

342 ), 

343 } 

344 

345 # Adapt some checks for some estimators 

346 

347 # not entirely sure why some of them pass 

348 # e.g. check_estimator_sparse_data passes for SurfaceLabelsMasker

349 # but not for SurfaceMasker?

350 

351 if is_glm(estimator): 

352 expected_failed_checks.pop("check_estimator_sparse_data") 

353 if SKLEARN_MINOR >= 5: 

354 expected_failed_checks.pop("check_estimator_sparse_matrix") 

355 expected_failed_checks.pop("check_estimator_sparse_array") 

356 if SKLEARN_MINOR >= 6: 

357 expected_failed_checks.pop("check_estimator_sparse_tag") 

358 

359 expected_failed_checks |= { 

360 # have nilearn replacements 

361 "check_estimators_dtypes": ("replaced by check_glm_dtypes"), 

362 "check_estimators_empty_data_messages": ( 

363 "not implemented for nifti data for performance reasons" 

364 ), 

365 "check_estimators_fit_returns_self": ( 

366 "replaced by check_glm_fit_returns_self" 

367 ), 

368 "check_fit_check_is_fitted": ("replaced by check_glm_is_fitted"), 

369 "check_transformer_data_not_an_array": ( 

370 "replaced by check_masker_transformer" 

371 ), 

372 "check_transformer_general": ( 

373 "replaced by check_masker_transformer" 

374 ), 

375 "check_transformer_preserve_dtypes": ( 

376 "replaced by check_masker_transformer" 

377 ), 

378 # nilearn replacements required 

379 "check_dict_unchanged": "TODO", 

380 "check_fit_score_takes_y": "TODO", 

381 } 

382 

383 if isinstance(estimator, (_BaseDecomposition,)): 

384 if SKLEARN_MINOR >= 6: 

385 expected_failed_checks.pop("check_estimator_sparse_tag") 

386 if not IS_SKLEARN_1_6_1_on_py_3_9 and SKLEARN_MINOR >= 5: 

387 expected_failed_checks.pop("check_estimator_sparse_array") 

388 

389 if is_masker(estimator): 

390 if accept_niimg_input(estimator): 

391 # TODO remove when bumping to nilearn 0.13.2 

392 expected_failed_checks |= { 

393 "check_do_not_raise_errors_in_init_or_set_params": ( 

394 "Deprecation cycle started to fix." 

395 ), 

396 "check_no_attributes_set_in_init": ( 

397 "Deprecation cycle started to fix." 

398 ), 

399 } 

400 

401 if isinstance(estimator, (NiftiMasker)) and SKLEARN_MINOR >= 5: 

402 if not IS_SKLEARN_1_6_1_on_py_3_9: 

403 expected_failed_checks.pop("check_estimator_sparse_array") 

404 

405 expected_failed_checks.pop("check_estimator_sparse_tag") 

406 

407 if isinstance(estimator, (RegionExtractor)) and SKLEARN_MINOR >= 6: 

408 expected_failed_checks.pop( 

409 "check_do_not_raise_errors_in_init_or_set_params" 

410 ) 

411 

412 return expected_failed_checks 

413 
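
# Illustrative usage (editor's sketch): on sklearn >= 1.6 this mapping is intended
# for sklearn's own test utilities instead of the check_estimator() generator above,
# e.g. (the estimator list is hypothetical):
#
#     from sklearn.utils.estimator_checks import parametrize_with_checks
#
#     @parametrize_with_checks(
#         [NiftiMasker()],
#         expected_failed_checks=return_expected_failed_checks,
#     )
#     def test_sklearn_compliance(estimator, check):
#         check(estimator)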

414 

415def unapplicable_checks() -> dict[str, str]: 

416 """Return sklearn checks that do not apply for nilearn estimators \ 

417 when they take images as input. 

418 """ 

419 return dict.fromkeys( 

420 [ 

421 "check_complex_data", 

422 "check_dtype_object", 

423 "check_estimator_sparse_array", 

424 "check_estimator_sparse_data", 

425 "check_estimator_sparse_matrix", 

426 "check_estimator_sparse_tag", 

427 "check_f_contiguous_array_estimator", 

428 "check_fit1d", 

429 "check_fit2d_1feature", 

430 "check_fit2d_1sample", 

431 "check_fit2d_predict1d", 

432 "check_n_features_in", 

433 "check_n_features_in_after_fitting", 

434 ], 

435 "not applicable for image input", 

436 ) 

437 

438 

439def expected_failed_checks_clustering() -> dict[str, str]: 

440 expected_failed_checks = { 

441 "check_estimator_sparse_array": "remove when dropping sklearn 1.4", 

442 "check_estimator_sparse_matrix": "remove when dropping sklearn 1.4", 

443 "check_clustering": "TODO", 

444 "check_estimators_nan_inf": "TODO", 

445 "check_fit2d_predict1d": "TODO", 

446 "check_n_features_in": "TODO", 

447 "check_n_features_in_after_fitting": "TODO", 

448 "check_transformer_data_not_an_array": "TODO", 

449 "check_transformer_general": "TODO", 

450 } 

451 

452 if SKLEARN_MINOR >= 5: 

453 expected_failed_checks.pop("check_estimator_sparse_matrix") 

454 expected_failed_checks.pop("check_estimator_sparse_array") 

455 

456 return expected_failed_checks 

457 

458 

459def expected_failed_checks_decoders(estimator) -> dict[str, str]: 

460 """Return expected failed sklearn checks for nilearn decoders.""" 

461 expected_failed_checks = { 

462 # the following have nilearn replacements for maskers and/or glm

463 # but not for decoders 

464 "check_estimators_empty_data_messages": ( 

465 "not implemented for nifti data performance reasons" 

466 ), 

467 "check_dont_overwrite_parameters": ( 

468 "replaced by check_img_estimator_dont_overwrite_parameters" 

469 ), 

470 "check_estimators_fit_returns_self": ( 

471 "replaced by check_fit_returns_self" 

472 ), 

473 "check_requires_y_none": ( 

474 "replaced by check_image_estimator_requires_y_none" 

475 ), 

476 "check_supervised_y_no_nan": ( 

477 "replaced by check_image_supervised_estimator_y_no_nan" 

478 ), 

479 # Those are skipped for now because they fail

480 # for unknown reasons 

481 # most often because sklearn inputs expect a numpy array 

482 # that errors with maskers, 

483 # or because a suitable nilearn replacement 

484 # has not yet been created. 

485 "check_dict_unchanged": "TODO", 

486 "check_estimators_dtypes": "TODO", 

487 "check_estimators_pickle": "TODO", 

488 "check_estimators_nan_inf": "TODO", 

489 "check_estimators_overwrite_params": "TODO", 

490 "check_fit_check_is_fitted": "TODO", 

491 "check_fit_idempotent": "TODO", 

492 "check_fit_score_takes_y": "TODO", 

493 "check_methods_sample_order_invariance": "TODO", 

494 "check_methods_subset_invariance": "TODO", 

495 "check_positive_only_tag_during_fit": "TODO", 

496 "check_pipeline_consistency": "TODO", 

497 "check_readonly_memmap_input": "TODO", 

498 "check_supervised_y_2d": "TODO", 

499 } 

500 

501 if is_classifier(estimator): 

502 expected_failed_checks |= { 

503 "check_classifier_data_not_an_array": ( 

504 "not applicable for image input" 

505 ), 

506 "check_classifiers_classes": "TODO", 

507 "check_classifiers_one_label": "TODO", 

508 "check_classifiers_regression_target": "TODO", 

509 "check_classifiers_train": "TODO", 

510 } 

511 

512 if is_regressor(estimator): 

513 expected_failed_checks |= { 

514 "check_regressor_data_not_an_array": ( 

515 "not applicable for image input" 

516 ), 

517 "check_regressor_multioutput": "TODO", 

518 "check_regressors_int": "TODO", 

519 "check_regressors_train": "TODO", 

520 "check_regressors_no_decision_function": "TODO", 

521 } 

522 

523 if hasattr(estimator, "transform"): 

524 expected_failed_checks |= { 

525 "check_transformer_data_not_an_array": ( 

526 "replaced by check_masker_transformer" 

527 ), 

528 "check_transformer_general": ( 

529 "replaced by check_masker_transformer" 

530 ), 

531 "check_transformer_preserve_dtypes": ( 

532 "replaced by check_masker_transformer" 

533 ), 

534 } 

535 

536 expected_failed_checks |= unapplicable_checks() 

537 

538 if isinstance(estimator, SearchLight): 

539 return expected_failed_checks 

540 

541 if not IS_SKLEARN_1_6_1_on_py_3_9: 

542 expected_failed_checks.pop("check_estimator_sparse_tag") 

543 

544 return expected_failed_checks 

545 

546 

547def nilearn_check_estimator(estimators: list[BaseEstimator]): 

548 if not isinstance(estimators, list): # pragma: no cover 

549 raise TypeError( 

550 "'estimators' should be a list. " 

551 f"Got {estimators.__class__.__name__}." 

552 ) 

553 for est in estimators: 

554 for e, check in nilearn_check_generator(estimator=est): 

555 yield e, check, check.__name__ 

556 
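
# Illustrative usage (editor's sketch): the nilearn-specific checks are consumed the
# same way as the sklearn ones, e.g. (hypothetical estimator list):
#
#     @pytest.mark.parametrize(
#         "estimator, check, name",
#         nilearn_check_estimator(estimators=[NiftiMasker(), SurfaceMasker()]),
#     )
#     def test_nilearn_checks(estimator, check, name):
#         check(estimator)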

557 

558def nilearn_check_generator(estimator: BaseEstimator): 

559 """Yield (estimator, check) tuples. 

560 

561 Each nilearn check can be run on an initialized estimator. 

562 """ 

563 if SKLEARN_LT_1_6: # pragma: no cover 

564 tags = estimator._more_tags() 

565 else: 

566 tags = estimator.__sklearn_tags__() 

567 

568 # TODO remove first if when dropping sklearn 1.5 

569 # for sklearn >= 1.6 tags are always a dataclass 

570 if isinstance(tags, dict) and "X_types" in tags:

571 requires_y = isinstance(estimator, _BaseDecoder) 

572 else: 

573 requires_y = getattr(tags.target_tags, "required", False) 

574 

575 yield (clone(estimator), check_estimator_has_sklearn_is_fitted) 

576 yield (clone(estimator), check_fit_returns_self) 

577 yield (clone(estimator), check_transformer_set_output) 

578 

579 if accept_niimg_input(estimator) or accept_surf_img_input(estimator):

580 if requires_y: 

581 yield (clone(estimator), check_image_estimator_requires_y_none) 

582 

583 if is_classifier(estimator) or is_regressor(estimator): 

584 yield (clone(estimator), check_image_supervised_estimator_y_no_nan) 

585 yield (clone(estimator), check_decoder_empty_data_messages) 

586 

587 if ( 

588 is_classifier(estimator) 

589 or is_regressor(estimator) 

590 or is_masker(estimator) 

591 or is_glm(estimator) 

592 ): 

593 yield ( 

594 clone(estimator), 

595 check_img_estimator_dont_overwrite_parameters, 

596 ) 

597 yield (clone(estimator), check_img_estimators_overwrite_params) 

598 

599 if is_masker(estimator):

600 yield (clone(estimator), check_masker_clean_kwargs) 

601 yield (clone(estimator), check_masker_compatibility_mask_image) 

602 yield (clone(estimator), check_masker_dict_unchanged) 

603 yield (clone(estimator), check_masker_dtypes) 

604 yield (clone(estimator), check_masker_empty_data_messages) 

605 yield (clone(estimator), check_masker_fit_score_takes_y) 

606 yield (clone(estimator), check_masker_fit_with_empty_mask) 

607 yield ( 

608 clone(estimator), 

609 check_masker_fit_with_non_finite_in_mask, 

610 ) 

611 yield (clone(estimator), check_masker_fitted) 

612 yield (clone(estimator), check_masker_generate_report) 

613 yield (clone(estimator), check_masker_generate_report_false) 

614 yield (clone(estimator), check_masker_inverse_transform) 

615 yield (clone(estimator), check_masker_transform_resampling) 

616 yield (clone(estimator), check_masker_mask_img) 

617 yield (clone(estimator), check_masker_mask_img_from_imgs) 

618 yield (clone(estimator), check_masker_no_mask_no_img) 

619 yield (clone(estimator), check_masker_refit) 

620 yield (clone(estimator), check_masker_smooth) 

621 yield (clone(estimator), check_masker_transformer) 

622 yield ( 

623 clone(estimator), 

624 check_masker_transformer_high_variance_confounds, 

625 ) 

626 

627 if not is_multimasker(estimator): 

628 yield (clone(estimator), check_masker_clean) 

629 yield (clone(estimator), check_masker_detrending) 

630 yield (clone(estimator), check_masker_transformer_sample_mask) 

631 yield (clone(estimator), check_masker_with_confounds) 

632 

633 if accept_niimg_input(estimator): 

634 yield (clone(estimator), check_nifti_masker_clean_error) 

635 yield (clone(estimator), check_nifti_masker_clean_warning) 

636 yield (clone(estimator), check_nifti_masker_dtype) 

637 yield (clone(estimator), check_nifti_masker_fit_transform) 

638 yield (clone(estimator), check_nifti_masker_fit_transform_5d) 

639 yield (clone(estimator), check_nifti_masker_fit_transform_files) 

640 yield (clone(estimator), check_nifti_masker_fit_with_3d_mask) 

641 yield ( 

642 clone(estimator), 

643 check_nifti_masker_generate_report_after_fit_with_only_mask, 

644 ) 

645 

646 if is_multimasker(estimator): 

647 yield ( 

648 clone(estimator), 

649 check_multi_nifti_masker_generate_report_4d_fit, 

650 ) 

651 yield ( 

652 clone(estimator), 

653 check_multi_masker_transformer_high_variance_confounds, 

654 ) 

655 yield ( 

656 clone(estimator), 

657 check_multi_masker_transformer_sample_mask, 

658 ) 

659 yield (clone(estimator), check_multi_masker_with_confounds) 

660 

661 if accept_surf_img_input(estimator): 

662 yield (clone(estimator), check_surface_masker_fit_with_mask) 

663 yield (clone(estimator), check_surface_masker_list_surf_images) 

664 

665 if is_glm(estimator):

666 yield (clone(estimator), check_glm_dtypes) 

667 yield (clone(estimator), check_glm_empty_data_messages) 

668 yield (clone(estimator), check_glm_is_fitted) 

669 

670 

671def get_tag(estimator: BaseEstimator, tag: str) -> bool: 

672 tags = estimator.__sklearn_tags__() 

673 # TODO remove first if when dropping sklearn 1.5 

674 # for sklearn >= 1.6 tags are always a dataclass 

675 if isinstance(tags, dict) and "X_types" in tags:

676 return tag in tags["X_types"] 

677 else: 

678 return getattr(tags.input_tags, tag, False) 

679 

680 

681def is_masker(estimator: BaseEstimator) -> bool: 

682 return get_tag(estimator, "masker") 

683 

684 

685def is_multimasker(estimator: BaseEstimator) -> bool: 

686 return get_tag(estimator, "multi_masker") 

687 

688 

689def is_glm(estimator: BaseEstimator) -> bool: 

690 return get_tag(estimator, "glm") 

691 

692 

693def accept_niimg_input(estimator: BaseEstimator) -> bool: 

694 return get_tag(estimator, "niimg_like") 

695 

696 

697def accept_surf_img_input(estimator: BaseEstimator) -> bool: 

698 return get_tag(estimator, "surf_img") 

699 
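
# Illustrative usage (editor's sketch): these tag helpers drive which checks
# nilearn_check_generator yields for a given estimator, e.g. (return values assumed):
#
#     >>> is_masker(NiftiMasker())                 # doctest: +SKIP
#     True
#     >>> accept_surf_img_input(SurfaceMasker())   # doctest: +SKIP
#     True
#     >>> is_glm(NiftiMasker())                    # doctest: +SKIP
#     False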

700 

701def _not_fitted_error_message(estimator): 

702 return ( 

703 f"This {type(estimator).__name__} instance is not fitted yet. " 

704 "Call 'fit' with appropriate arguments before using this estimator." 

705 ) 

706 

707 

708def fit_estimator(estimator: BaseEstimator) -> BaseEstimator: 

709 """Fit on a nilearn estimator with appropriate input and return it.""" 

710 assert accept_niimg_input(estimator) or accept_surf_img_input(estimator) 

711 

712 if is_glm(estimator): 

713 data, design_matrices = _make_surface_img_and_design() 

714 # FirstLevel 

715 if hasattr(estimator, "hrf_model"): 

716 return estimator.fit(data, design_matrices=design_matrices) 

717 # SecondLevel 

718 else: 

719 return estimator.fit(data, design_matrix=design_matrices) 

720 

721 elif isinstance(estimator, SearchLight): 

722 n_samples = 30 

723 data = _rng().random((5, 5, 5, n_samples)) 

724 # Create a condition array, with balanced classes 

725 y = np.arange(n_samples, dtype=int) >= (n_samples // 2) 

726 

727 data[2, 2, 2, :] = 0 

728 data[2, 2, 2, y] = 2 

729 X = Nifti1Image(data, np.eye(4)) 

730 

731 return estimator.fit(X, y) 

732 

733 elif is_classifier(estimator): 

734 dim = 5 

735 X, y = make_classification( 

736 n_samples=30, 

737 n_features=dim**3, 

738 scale=3.0, 

739 n_informative=5, 

740 n_classes=2, 

741 random_state=42, 

742 ) 

743 X, _ = to_niimgs(X, [dim, dim, dim]) 

744 return estimator.fit(X, y) 

745 

746 elif is_regressor(estimator): 

747 dim = 5 

748 X, y = make_regression( 

749 n_samples=30, 

750 n_features=dim**3, 

751 n_informative=dim, 

752 noise=1.5, 

753 bias=1.0, 

754 random_state=42, 

755 ) 

756 X = StandardScaler().fit_transform(X) 

757 X, _ = to_niimgs(X, [dim, dim, dim]) 

758 return estimator.fit(X, y) 

759 

760 elif is_masker(estimator): 

761 if accept_niimg_input(estimator): 

762 imgs = Nifti1Image(_rng().random(_shape_3d_large()), _affine_eye()) 

763 else: 

764 imgs = _make_surface_img(10) 

765 return estimator.fit(imgs) 

766 

767 else: 

768 imgs = Nifti1Image(_rng().random(_shape_3d_large()), _affine_eye()) 

769 return estimator.fit(imgs) 

770 

771 

772# ------------------ GENERIC CHECKS ------------------ 

773 

774 

775def _check_mask_img_(estimator): 

776 if accept_niimg_input(estimator): 

777 assert isinstance(estimator.mask_img_, Nifti1Image) 

778 else: 

779 assert isinstance(estimator.mask_img_, SurfaceImage) 

780 load_mask_img(estimator.mask_img_) 

781 

782 

783def check_estimator_has_sklearn_is_fitted(estimator): 

784 """Check appropriate response to check_fitted from sklearn before fitting. 

785 

786 check that before fitting 

787 - estimator has a __sklearn_is_fitted__ method 

788 - running sklearn check_is_fitted on estimator throws an error 

789 """ 

790 if not hasattr(estimator, "__sklearn_is_fitted__"): 

791 raise TypeError( 

792 "All nilearn estimators must have __sklearn_is_fitted__ method." 

793 ) 

794 

795 if estimator.__sklearn_is_fitted__() is True: 

796 raise ValueError( 

797 "Estimator __sklearn_is_fitted__ must return False before fit." 

798 ) 

799 

800 with pytest.raises(ValueError, match=_not_fitted_error_message(estimator)): 

801 check_is_fitted(estimator) 

802 

803 

804def check_transformer_set_output(estimator): 

805 """Check that set_ouput throws a not implemented error.""" 

806 if hasattr(estimator, "transform"): 

807 with pytest.raises(NotImplementedError): 

808 estimator.set_output(transform="default") 

809 

810 

811def check_fit_returns_self(estimator) -> None: 

812 """Check maskers return itself after fit. 

813 

814 Replace sklearn check_estimators_fit_returns_self 

815 """ 

816 # TODO make sure the following estimators pass this check

817 if isinstance( 

818 estimator, 

819 ( 

820 _BaseDecomposition, 

821 ReNA, 

822 HierarchicalKMeans, 

823 GroupSparseCovariance, 

824 GroupSparseCovarianceCV, 

825 ConnectivityMeasure, 

826 ), 

827 ): 

828 return None 

829 

830 fitted_estimator = fit_estimator(estimator) 

831 

832 assert fitted_estimator is estimator 

833 

834 

835def check_img_estimator_dont_overwrite_parameters(estimator) -> None: 

836 """Check that fit method only changes or sets private attributes. 

837 

838 Only for estimators that work with images.

839 

840 Replaces check_dont_overwrite_parameters from sklearn. 

841 """ 

842 estimator = clone(estimator) 

843 

844 set_random_state(estimator, 1) 

845 

846 dict_before_fit = estimator.__dict__.copy() 

847 

848 fitted_estimator = fit_estimator(estimator) 

849 

850 dict_after_fit = fitted_estimator.__dict__ 

851 

852 public_keys_after_fit = [ 

853 key for key in dict_after_fit if _is_public_parameter(key) 

854 ] 

855 

856 attrs_added_by_fit = [ 

857 key for key in public_keys_after_fit if key not in dict_before_fit 

858 ] 

859 

860 # check that fit doesn't add any public attribute 

861 assert not attrs_added_by_fit, ( 

862 f"Estimator {estimator.__class__.__name__} " 

863 "adds public attribute(s) during" 

864 " the fit method." 

865 " Estimators are only allowed to add private attributes" 

866 " either started with _ or ended" 

867 f" with _ but [{', '.join(attrs_added_by_fit)}] added" 

868 ) 

869 

870 # check that fit doesn't change any public attribute 

871 

872 # nifti_maps_masker, nifti_spheres_masker

873 # change memory parameters on fit if it's None 

874 keys_to_ignore = ["memory"] 

875 

876 attrs_changed_by_fit = [ 

877 key 

878 for key in public_keys_after_fit 

879 if (dict_before_fit[key] is not dict_after_fit[key]) 

880 and key not in keys_to_ignore 

881 ] 

882 

883 assert not attrs_changed_by_fit, ( 

884 f"Estimator {estimator.__class__.__name__} " 

885 "changes public attribute(s) during" 

886 " the fit method. Estimators are only allowed" 

887 " to change attributes started" 

888 " or ended with _, but" 

889 f" [{', '.join(attrs_changed_by_fit)}] changed" 

890 ) 

891 

892 

893def check_img_estimators_overwrite_params(estimator) -> None: 

894 """Check that we do not change or mutate the internal state of input. 

895 

896 Replaces sklearn check_estimators_overwrite_params 

897 """ 

898 estimator = clone(estimator) 

899 

900 # Make a physical copy of the original estimator parameters before fitting. 

901 params = estimator.get_params() 

902 original_params = deepcopy(params) 

903 

904 # Fit the model 

905 fitted_estimator = fit_estimator(estimator) 

906 

907 # Compare the state of the model parameters with the original parameters 

908 new_params = fitted_estimator.get_params() 

909 

910 # nifti_maps_masker, nifti_spheres_masker

911 # change memory parameters on fit if it's None 

912 param_to_ignore = ["memory"] 

913 

914 for param_name, original_value in original_params.items(): 

915 if param_name in param_to_ignore: 

916 continue 

917 

918 new_value = new_params[param_name] 

919 

920 # We should never change or mutate the internal state of input 

921 # parameters by default. To check this we use the joblib.hash function 

922 # that introspects recursively any subobjects to compute a checksum. 

923 # The only exception to this rule of immutable constructor parameters 

924 # is possible RandomState instance but in this check we explicitly 

925 # fixed the random_state params recursively to be integer seeds. 

926 assert joblib.hash(new_value) == joblib.hash(original_value), ( 

927 f"Estimator {estimator.__class__.__name__} " 

928 "should not change or mutate " 

929 f"the parameter {param_name} from {original_value} " 

930 f"to {new_value} during fit." 

931 ) 

932 

933 

934# ------------------ DECODERS CHECKS ------------------ 

935 

936 

937def check_image_estimator_requires_y_none(estimator) -> None: 

938 """Check estimator with requires_y=True fails gracefully for y=None. 

939 

940 Replaces sklearn check_requires_y_none 

941 """ 

942 expected_err_msgs = "requires y to be passed, but the target y is None" 

943 shape = (5, 5, 5) if isinstance(estimator, SearchLight) else (30, 31, 32) 

944 input_img = Nifti1Image(_rng().random(shape), _affine_eye()) 

945 try: 

946 estimator.fit(input_img, None) 

947 except ValueError as ve: 

948 if all(msg not in str(ve) for msg in expected_err_msgs): 

949 raise ve 

950 

951 

952def check_image_supervised_estimator_y_no_nan(estimator) -> None: 

953 """Check estimator fails if y contains nan or inf. 

954 

955 Replaces sklearn check_supervised_y_no_nan 

956 """ 

957 dim = 5 

958 if isinstance(estimator, SearchLight): 

959 n_samples = 30 

960 # Create a condition array, with balanced classes 

961 y = np.arange(n_samples, dtype=int) >= (n_samples // 2) 

962 

963 data = _rng().random((dim, dim, dim, n_samples)) 

964 data[2, 2, 2, :] = 0 

965 data[2, 2, 2, y] = 2 

966 X = Nifti1Image(data, np.eye(4)) 

967 

968 else: 

969 # we can use classification data even for regressors 

970 # because fit should fail early 

971 X, y = make_classification( 

972 n_samples=20, 

973 n_features=dim**3, 

974 scale=3.0, 

975 n_informative=5, 

976 n_classes=2, 

977 random_state=42, 

978 ) 

979 X, _ = to_niimgs(X, [dim, dim, dim]) 

980 

981 y = _rng().random(y.shape) 

982 

983 for value in [np.inf, np.nan]: 

984 y[5,] = value 

985 with pytest.raises(ValueError, match="Input .*contains"): 

986 estimator.fit(X, y) 

987 

988 

989def check_decoder_empty_data_messages(estimator): 

990 """Check that empty images are caught properly. 

991 

992 Replaces sklearn check_estimators_empty_data_messages. 

993 

994 Not implemented for nifti data for performance reasons. 

995 See : https://github.com/nilearn/nilearn/pull/5293#issuecomment-2977170723 

996 """ 

997 n_samples = 30 

998 if isinstance(estimator, SearchLight): 

999 # SearchLight does not support surface data directly

1000 return None 

1001 

1002 else: 

1003 # we can use classification data even for regressors 

1004 # because fit should fail early 

1005 dim = 5 

1006 _, y = make_classification( 

1007 n_samples=20, 

1008 n_features=dim**3, 

1009 scale=3.0, 

1010 n_informative=5, 

1011 n_classes=2, 

1012 random_state=42, 

1013 ) 

1014 

1015 imgs = _make_surface_img(n_samples) 

1016 data = { 

1017 part: np.empty(0).reshape((imgs.data.parts[part].shape[0], 0)) 

1018 for part in imgs.data.parts 

1019 } 

1020 X = SurfaceImage(imgs.mesh, data) 

1021 

1022 y = _rng().random(y.shape) 

1023 

1024 with pytest.raises(ValueError, match="empty"): 

1025 estimator.fit(X, y) 

1026 

1027 

1028# ------------------ MASKER CHECKS ------------------ 

1029 

1030 

1031def check_masker_dict_unchanged(estimator): 

1032 """Replace check_dict_unchanged from sklearn. 

1033 

1034 transform() should not change the dict of the object.

1035 """ 

1036 if accept_niimg_input(estimator): 

1037 # We use a different shape here to force some maskers 

1038 # to perform a resampling. 

1039 shape = (30, 31, 32) 

1040 input_img = Nifti1Image(_rng().random(shape), _affine_eye()) 

1041 else: 

1042 input_img = _make_surface_img(10) 

1043 

1044 estimator = estimator.fit(input_img) 

1045 

1046 dict_before = estimator.__dict__.copy() 

1047 

1048 estimator.transform(input_img) 

1049 

1050 dict_after = estimator.__dict__ 

1051 

1052 # TODO NiftiLabelsMasker is modified at transform time 

1053 # see issue https://github.com/nilearn/nilearn/issues/2720 

1054 if isinstance(estimator, (NiftiLabelsMasker)): 

1055 with pytest.raises(AssertionError): 

1056 assert dict_after == dict_before 

1057 else: 

1058 # The following try / except is mostly 

1059 # to give more informative error messages when this check fails. 

1060 try: 

1061 assert dict_after == dict_before 

1062 except AssertionError as e: 

1063 unmatched_keys = set(dict_after.keys()) ^ set(dict_before.keys()) 

1064 if len(unmatched_keys) > 0: 

1065 raise ValueError( 

1066 "Estimator changes '__dict__' keys during transform.\n" 

1067 f"{unmatched_keys} \n" 

1068 ) 

1069 

1070 difference = {} 

1071 for x in dict_before: 

1072 if type(dict_before[x]) is not type(dict_after[x]): 

1073 difference[x] = { 

1074 "before": dict_before[x], 

1075 "after": dict_after[x], 

1076 } 

1077 continue 

1078 if ( 

1079 isinstance(dict_before[x], np.ndarray) 

1080 and not np.array_equal(dict_before[x], dict_after[x]) 

1081 and not check_imgs_equal(dict_before[x], dict_after[x]) 

1082 ) or ( 

1083 not isinstance(dict_before[x], (np.ndarray, Nifti1Image)) 

1084 and dict_before[x] != dict_after[x] 

1085 ): 

1086 difference[x] = { 

1087 "before": dict_before[x], 

1088 "after": dict_after[x], 

1089 } 

1090 continue 

1091 if difference: 

1092 raise ValueError( 

1093 "Estimator changes the following '__dict__' keys \n" 

1094 "during transform.\n" 

1095 f"{difference}" 

1096 ) 

1097 else: 

1098 raise e 

1099 except Exception as e: 

1100 raise e 

1101 

1102 

1103def check_masker_fitted(estimator): 

1104 """Check appropriate response of maskers to check_fitted from sklearn. 

1105 

1106 Should act as a replacement in the case of the maskers 

1107 for sklearn's check_fit_check_is_fitted 

1108 

1109 check that before fitting 

1110 - transform() and inverse_transform() \ 

1111 throw the same error

1112 

1113 check that after fitting 

1114 - __sklearn_is_fitted__ returns true 

1115 - running sklearn check_fitted throws no error 

1116 - maskers have an n_elements_ attribute that is a positive int

1117 """ 

1118 # Failure should happen before the input type is determined 

1119 # so we can pass nifti image to surface maskers. 

1120 with pytest.raises(ValueError, match=_not_fitted_error_message(estimator)): 

1121 estimator.transform(_img_3d_rand()) 

1122 with pytest.raises(ValueError, match=_not_fitted_error_message(estimator)): 

1123 estimator.transform_single_imgs(_img_3d_rand()) 

1124 if is_multimasker(estimator): 

1125 with pytest.raises( 

1126 ValueError, match=_not_fitted_error_message(estimator) 

1127 ): 

1128 estimator.transform_imgs([_img_3d_rand()]) 

1129 

1130 # Failure should happen before the size of the input type is determined 

1131 # so we can pass any array here. 

1132 signals = np.ones((10, 11)) 

1133 with pytest.raises(ValueError, match=_not_fitted_error_message(estimator)): 

1134 estimator.inverse_transform(signals) 

1135 

1136 # NiftiMasker and SurfaceMasker cannot accept None on fit 

1137 if accept_niimg_input(estimator): 

1138 estimator.fit(_img_3d_rand()) 

1139 else: 

1140 estimator.fit(_make_surface_img(10)) 

1141 

1142 assert estimator.__sklearn_is_fitted__() 

1143 

1144 check_is_fitted(estimator) 

1145 

1146 assert isinstance(estimator.n_elements_, int) and estimator.n_elements_ > 0 

1147 

1148 

1149def check_masker_clean_kwargs(estimator): 

1150 """Check attributes for cleaning. 

1151 

1152 Maskers accept a clean_args dict,

1153 stored in clean_args, that contains parameters to pass to clean.

1154 """ 

1155 assert estimator.clean_args is None 

1156 

1157 

1158def check_masker_detrending(estimator): 

1159 """Check detrending does something. 

1160 

1161 Fit transform on the same input should give different results

1162 depending on whether detrend is True or False.

1163 """ 

1164 if accept_niimg_input(estimator): 

1165 input_img = _img_4d_rand_eye_medium() 

1166 else: 

1167 input_img = _make_surface_img(100) 

1168 

1169 signal = estimator.fit_transform(input_img) 

1170 

1171 estimator.detrend = True 

1172 detrended_signal = estimator.fit_transform(input_img) 

1173 

1174 assert_raises(AssertionError, assert_array_equal, detrended_signal, signal) 

1175 

1176 

1177def check_masker_compatibility_mask_image(estimator): 

1178 """Check compatibility of the mask_img and images to masker. 

1179 

1180 Compatibility should be checked at fit and transform time.

1181 

1182 For nifti maskers this is handled by one of the check_nifti functions.

1183 For surface maskers, check_compatibility_mask_and_images does it. 

1184 But this means we do not have exactly the same error messages. 

1185 """ 

1186 if accept_niimg_input(estimator): 

1187 mask_img = _img_mask_mni() 

1188 input_img = _make_surface_img() 

1189 else: 

1190 mask_img = _make_surface_mask() 

1191 input_img = _img_3d_mni() 

1192 

1193 estimator.mask_img = mask_img 

1194 with pytest.raises(TypeError): 

1195 estimator.fit(input_img) 

1196 

1197 if accept_niimg_input(estimator): 

1198 # using larger images to be compatible 

1199 # with regions extraction tests 

1200 mask = np.zeros(_shape_3d_large(), dtype=np.int8) 

1201 mask[1:-1, 1:-1, 1:-1] = 1 

1202 mask_img = Nifti1Image(mask, _affine_eye()) 

1203 image_to_transform = _make_surface_img() 

1204 else: 

1205 mask_img = _make_surface_mask() 

1206 image_to_transform = _img_3d_mni() 

1207 

1208 estimator = clone(estimator) 

1209 estimator.mask_img = mask_img 

1210 estimator.fit() 

1211 with pytest.raises(TypeError): 

1212 estimator.transform(image_to_transform) 

1213 

1214 _check_mask_img_(estimator) 

1215 

1216 

1217def check_masker_no_mask_no_img(estimator): 

1218 """Check maskers mask_img_ when no mask passed at init or imgs at fit. 

1219 

1220 For (Multi)NiftiMasker and SurfaceMasker fit should raise ValueError. 

1221 For all other maskers mask_img_ should be None after fit. 

1222 """ 

1223 assert not hasattr(estimator, "mask_img_") 

1224 

1225 if isinstance(estimator, (NiftiMasker, SurfaceMasker)): 

1226 with pytest.raises( 

1227 ValueError, match="Parameter 'imgs' must be provided to " 

1228 ): 

1229 estimator.fit() 

1230 else: 

1231 estimator.fit() 

1232 assert estimator.mask_img_ is None 

1233 

1234 

1235def check_masker_mask_img_from_imgs(estimator): 

1236 """Check maskers mask_img_ inferred from imgs when no mask is provided. 

1237 

1238 For (Multi)NiftiMasker and SurfaceMasker: 

1239 they must have a valid mask_img_ after fit. 

1240 For all other maskers mask_img_ should be None after fit. 

1241 """ 

1242 if accept_niimg_input(estimator): 

1243 # Small image with shape=(7, 8, 9) would fail with MultiNiftiMasker 

1244 # giving a mask_img_ that masks all the data: unclear why

1245 input_img = Nifti1Image( 

1246 _rng().random(_shape_3d_large()), _affine_mni() 

1247 ) 

1248 

1249 else: 

1250 input_img = _make_surface_img(2) 

1251 

1252 # Except for (Multi)NiftiMasker and SurfaceMasker, 

1253 # maskers have mask_img_ = None after fitting some input image 

1254 # when no mask was passed at construction 

1255 estimator = clone(estimator) 

1256 assert not hasattr(estimator, "mask_img_") 

1257 

1258 estimator.fit(input_img) 

1259 

1260 if isinstance(estimator, (NiftiMasker, SurfaceMasker)): 

1261 _check_mask_img_(estimator) 

1262 else: 

1263 assert estimator.mask_img_ is None 

1264 

1265 

1266def check_masker_mask_img(estimator): 

1267 """Check maskers mask_img_ post fit is valid. 

1268 

1269 If a mask is passed at construction, 

1270 then mask_img_ should be a valid mask after fit. 

1271 

1272 Maskers should be fittable 

1273 even when passing a non-binary image 

1274 with multiple samples (4D for volume, 2D for surface) as mask. 

1275 Resulting mask_img_ should be binary and have a single sample. 

1276 """ 

1277 if accept_niimg_input(estimator): 

1278 # Small image with shape=(7, 8, 9) would fail with MultiNiftiMasker 

1279 # giving a mask_img_ that masks all the data: unclear why

1280 mask_data = np.zeros(_shape_3d_large(), dtype="int8") 

1281 mask_data[2:-2, 2:-2, 2:-2] = 1 

1282 binary_mask_img = Nifti1Image(mask_data, _affine_eye()) 

1283 

1284 input_img = Nifti1Image( 

1285 _rng().random(_shape_3d_large()), _affine_eye() 

1286 ) 

1287 

1288 non_binary_mask_img = Nifti1Image( 

1289 _rng().random((*_shape_3d_large(), 2)), _affine_eye() 

1290 ) 

1291 

1292 else: 

1293 binary_mask_img = _make_surface_mask() 

1294 non_binary_mask_img = _make_surface_img() 

1295 

1296 input_img = _make_surface_img(2) 

1297 

1298 # happy path 

1299 estimator = clone(estimator) 

1300 estimator.mask_img = binary_mask_img 

1301 assert not hasattr(estimator, "mask_img_") 

1302 

1303 estimator.fit() 

1304 

1305 _check_mask_img_(estimator) 

1306 

1307 # use non binary multi-sample image as mask 

1308 estimator = clone(estimator) 

1309 estimator.mask_img = non_binary_mask_img 

1310 assert not hasattr(estimator, "mask_img_") 

1311 

1312 estimator.fit() 

1313 

1314 _check_mask_img_(estimator) 

1315 

1316 # use mask at init and imgs at fit 

1317 # mask at init should prevail 

1318 estimator = clone(estimator) 

1319 estimator.mask_img = binary_mask_img 

1320 

1321 estimator.fit() 

1322 ref_mask_img_ = estimator.mask_img_ 

1323 

1324 estimator = clone(estimator) 

1325 estimator.mask_img = binary_mask_img 

1326 

1327 assert not hasattr(estimator, "mask_img_") 

1328 

1329 if isinstance(estimator, (NiftiMasker, SurfaceMasker)): 

1330 with pytest.warns( 

1331 UserWarning, 

1332 match=( 

1333 "Generation of a mask has been requested .* " 

1334 "while a mask was given at masker creation." 

1335 ), 

1336 ): 

1337 estimator.fit(input_img) 

1338 else: 

1339 estimator.fit(input_img) 

1340 

1341 _check_mask_img_(estimator) 

1342 if accept_niimg_input(estimator): 

1343 assert_array_equal( 

1344 ref_mask_img_.get_fdata(), estimator.mask_img_.get_fdata() 

1345 ) 

1346 else: 

1347 assert_array_equal( 

1348 get_surface_data(ref_mask_img_), 

1349 get_surface_data(estimator.mask_img_), 

1350 ) 

1351 

1352 

1353def check_masker_clean(estimator): 

1354 """Check that cleaning does something on fit transform. 

1355 

1356 Fit transform on the same input should give different results

1357 if some cleaning parameters are passed. 

1358 """ 

1359 if accept_niimg_input(estimator): 

1360 input_img = _img_4d_rand_eye_medium() 

1361 else: 

1362 input_img = _make_surface_img(100) 

1363 

1364 signal = estimator.fit_transform(input_img) 

1365 

1366 estimator.t_r = 2.0 

1367 estimator.high_pass = 1 / 128 

1368 estimator.clean_args = {"filter": "cosine"} 

1369 detrended_signal = estimator.fit_transform(input_img) 

1370 

1371 assert_raises(AssertionError, assert_array_equal, detrended_signal, signal) 

1372 

1373 

1374def check_masker_transformer(estimator): 

1375 """Replace sklearn _check_transformer for maskers. 

1376 

1377 - for maskers transform is in the base class and 

1378 implemented via a transform_single_imgs 

1379 - checks that "imgs" (and not X) is the parameter 

1380 for input for fit / transform 

1381 - fit_transform method should work on a non-fitted estimator

1382 - fit_transform should give the same result as fit then transform

1383 """ 

1384 # transform_single_imgs should not be an abstract method anymore 

1385 assert not getattr( 

1386 estimator.transform_single_imgs, "__isabstractmethod__", False 

1387 ) 

1388 

1389 for attr in ["fit", "transform", "fit_transform"]: 

1390 tmp = dict(**inspect.signature(getattr(estimator, attr)).parameters) 

1391 assert next(iter(tmp)) == "imgs" 

1392 assert "X" not in tmp 

1393 

1394 if accept_niimg_input(estimator): 

1395 input_img = _img_4d_rand_eye_medium() 

1396 else: 

1397 input_img = _make_surface_img(100) 

1398 

1399 signal_1 = estimator.fit_transform(input_img) 

1400 

1401 estimator = clone(estimator) 

1402 signal_2 = estimator.fit(input_img).transform(input_img) 

1403 

1404 assert_array_equal(signal_1, signal_2) 

1405 

1406 

1407def check_masker_transformer_high_variance_confounds(estimator): 

1408 """Check high_variance_confounds use in maskers. 

1409 

1410 Make sure that using high_variance_confounds returns a different result.

1411 

1412 Ensure that high_variance_confounds can be used with regular confounds, 

1413 and that the results differ from when just using the confounds alone.

1414 """ 

1415 length = 10 

1416 

1417 if accept_niimg_input(estimator): 

1418 data = _rng().random((*_shape_3d_default(), length)) 

1419 input_img = Nifti1Image(data, _affine_eye()) 

1420 else: 

1421 input_img = _make_surface_img(length) 

1422 

1423 estimator.high_variance_confounds = False 

1424 

1425 signal = estimator.fit_transform(input_img) 

1426 

1427 estimator = clone(estimator) 

1428 estimator.high_variance_confounds = True 

1429 

1430 signal_hvc = estimator.fit_transform(input_img) 

1431 

1432 assert_raises(AssertionError, assert_array_equal, signal, signal_hvc) 

1433 

1434 with TemporaryDirectory() as tmp_dir: 

1435 array = _rng().random((length, 3)) 

1436 

1437 dataframe = pd.DataFrame(array) 

1438 

1439 tmp_dir = Path(tmp_dir) 

1440 dataframe.to_csv(tmp_dir / "confounds.csv") 

1441 

1442 for c in [array, dataframe, tmp_dir / "confounds.csv"]: 

1443 confounds = [c] if is_multimasker(estimator) else c 

1444 

1445 estimator = clone(estimator) 

1446 estimator.high_variance_confounds = False 

1447 signal_c = estimator.fit_transform(input_img, confounds=confounds) 

1448 

1449 estimator = clone(estimator) 

1450 estimator.high_variance_confounds = True 

1451 signal_c_hvc = estimator.fit_transform( 

1452 input_img, confounds=confounds 

1453 ) 

1454 

1455 assert_raises( 

1456 AssertionError, assert_array_equal, signal_c, signal_c_hvc 

1457 ) 

1458 

1459 

1460def check_masker_transformer_sample_mask(estimator): 

1461 """Check sample_mask use in maskers. 

1462 

1463 Make sure that using sample_mask returns a different result

1464 compared to when it's not used.

1465 

1466 Try different types of sample_mask 

1467 that always keep the same samples (samples 1, 2 and 4)

1468 and should all return the same thing.

1469 """ 

1470 if accept_niimg_input(estimator): 

1471 input_img = _img_4d_rand_eye() 

1472 else: 

1473 input_img = _make_surface_img(5) 

1474 

1475 estimator.fit(input_img) 

1476 signal_1 = estimator.transform(input_img, sample_mask=None) 

1477 

1478 assert signal_1.ndim == 2 

1479 

1480 # index sample to keep 

1481 sample_mask = np.asarray([1, 2, 4]) 

1482 

1483 signal_2 = estimator.transform(input_img, sample_mask=sample_mask) 

1484 

1485 assert signal_2.shape[0] == 3 

1486 

1487 assert_raises(AssertionError, assert_array_equal, signal_1, signal_2) 

1488 

1489 # logical indexing 

1490 n_sample = signal_1.shape[0] 

1491 sample_mask = np.full((n_sample,), True) 

1492 np.put(sample_mask, [0, 3], [False, False]) 

1493 

1494 signal_3 = estimator.transform(input_img, sample_mask=sample_mask) 

1495 

1496 assert_array_equal(signal_2, signal_3) 

1497 

1498 # list of explicit index 

1499 sample_mask = [[1, 2, 4]] 

1500 

1501 signal_4 = estimator.transform(input_img, sample_mask=sample_mask) 

1502 

1503 assert_array_equal(signal_2, signal_4) 

1504 

1505 # list of logical index 

1506 sample_mask = [[False, True, True, False, True]] 

1507 

1508 signal_5 = estimator.transform(input_img, sample_mask=sample_mask) 

1509 

1510 assert_array_equal(signal_2, signal_5) 

1511 

1512 

1513def check_masker_with_confounds(estimator): 

1514 """Test fit_transform with confounds. 

1515 

1516 Check different types of confounds 

1517 (array, dataframe, str or path to txt, csv, tsv) 

1518 and ensure results are different

1519 from when not using confounds.

1520 

1521 Check proper errors are raised if file is not found 

1522 or if confounds do not match signal length. 

1523 

1524 For more tests see those of signal.clean. 

1525 """ 

1526 length = 20 

1527 if accept_niimg_input(estimator): 

1528 input_img = Nifti1Image( 

1529 _rng().random((4, 5, 6, length)), affine=_affine_eye() 

1530 ) 

1531 else: 

1532 input_img = _make_surface_img(length) 

1533 

1534 signal_1 = estimator.fit_transform(input_img, confounds=None) 

1535 

1536 array = _rng().random((length, 3)) 

1537 

1538 dataframe = pd.DataFrame(array) 

1539 

1540 confounds_path = nilearn_dir() / "tests" / "data" / "spm_confounds.txt" 

1541 

1542 for confounds in [array, dataframe, confounds_path, str(confounds_path)]: 

1543 signal_2 = estimator.fit_transform(input_img, confounds=confounds) 

1544 

1545 assert_raises(AssertionError, assert_array_equal, signal_1, signal_2) 

1546 

1547 with TemporaryDirectory() as tmp_dir: 

1548 tmp_dir = Path(tmp_dir) 

1549 dataframe.to_csv(tmp_dir / "confounds.csv") 

1550 signal_2 = estimator.fit_transform( 

1551 input_img, confounds=tmp_dir / "confounds.csv" 

1552 ) 

1553 

1554 assert_raises(AssertionError, assert_array_equal, signal_1, signal_2) 

1555 

1556 dataframe.to_csv(tmp_dir / "confounds.tsv", sep="\t") 

1557 signal_2 = estimator.fit_transform( 

1558 input_img, confounds=tmp_dir / "confounds.tsv" 

1559 ) 

1560 

1561 assert_raises(AssertionError, assert_array_equal, signal_1, signal_2) 

1562 

1563 with pytest.raises(FileNotFoundError): 

1564 estimator.fit_transform(input_img, confounds="not_a_file.txt") 

1565 

1566 with pytest.raises( 

1567 ValueError, match="Confound signal has an incorrect length" 

1568 ): 

1569 estimator.fit_transform( 

1570 input_img, confounds=_rng().random((length * 2, 3)) 

1571 ) 

1572 

1573 

1574def check_masker_refit(estimator): 

1575 """Check masker can be refitted and give different results.""" 

1576 if accept_niimg_input(estimator): 

1577 # using larger images to be compatible 

1578 # with regions extraction tests 

1579 mask = np.zeros(_shape_3d_large(), dtype=np.int8) 

1580 mask[1:-1, 1:-1, 1:-1] = 1 

1581 mask_img_1 = Nifti1Image(mask, _affine_eye()) 

1582 

1583 mask = np.zeros(_shape_3d_large(), dtype=np.int8) 

1584 mask[3:-3, 3:-3, 3:-3] = 1 

1585 mask_img_2 = Nifti1Image(mask, _affine_eye()) 

1586 else: 

1587 mask_img_1 = _make_surface_mask() 

1588 data = { 

1589 part: np.ones(mask_img_1.data.parts[part].shape) 

1590 for part in mask_img_1.data.parts 

1591 } 

1592 mask_img_2 = SurfaceImage(mask_img_1.mesh, data) 

1593 

1594 estimator.mask_img = mask_img_1 

1595 estimator.fit() 

1596 fitted_mask_1 = estimator.mask_img_ 

1597 

1598 estimator.mask_img = mask_img_2 

1599 estimator.fit() 

1600 fitted_mask_2 = estimator.mask_img_ 

1601 

1602 if accept_niimg_input(estimator): 

1603 with pytest.raises(AssertionError): 

1604 assert_array_equal( 

1605 fitted_mask_1.get_fdata(), fitted_mask_2.get_fdata() 

1606 ) 

1607 else: 

1608 with pytest.raises(ValueError): 

1609 assert_surface_image_equal(fitted_mask_1, fitted_mask_2) 

1610 

1611 

1612def check_masker_empty_data_messages(estimator): 

1613 """Check that empty images are caught properly. 

1614 

1615 Replaces sklearn check_estimators_empty_data_messages. 

1616 

1617 Not implemented for nifti maskers for performance reasons. 

1618 See : https://github.com/nilearn/nilearn/pull/5293#issuecomment-2977170723 

1619 """ 

1620 if accept_niimg_input(estimator): 

1621 return None 

1622 

1623 else: 

1624 imgs = _make_surface_img() 

1625 data = { 

1626 part: np.empty(0).reshape((imgs.data.parts[part].shape[0], 0)) 

1627 for part in imgs.data.parts 

1628 } 

1629 imgs = SurfaceImage(imgs.mesh, data) 

1630 

1631 mask_img = _make_surface_mask() 

1632 

1633 with pytest.raises(ValueError, match="empty"): 

1634 estimator.fit(imgs) 

1635 

1636 estimator.mask_img = mask_img 

1637 estimator.fit() 

1638 with pytest.raises(ValueError, match="empty"): 

1639 estimator.transform(imgs) 

1640 

1641 

1642# ------------------ GENERIC MASKER CHECKS ------------------

1643 

1644 

1645def check_masker_fit_with_empty_mask(estimator): 

1646 """Check that a mask that excludes all voxels raises an error."""

1647 if accept_niimg_input(estimator): 

1648 mask_img = _img_3d_zeros() 

1649 imgs = [_img_3d_rand()] 

1650 else: 

1651 mask_img = _make_surface_mask() 

1652 for k, v in mask_img.data.parts.items(): 

1653 mask_img.data.parts[k] = np.zeros(v.shape) 

1654 imgs = _make_surface_img(1) 

1655 

1656 estimator.mask_img = mask_img 

1657 with pytest.raises( 

1658 ValueError, 

1659 match="The mask is invalid as it is empty: it masks all data", 

1660 ): 

1661 estimator.fit(imgs) 

1662 

1663 
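# --- Editor's note: illustrative sketch, not part of estimator_checks.py ---
# An all-zero mask should be rejected at fit time, assuming NiftiMasker is
# among the maskers covered by check_masker_fit_with_empty_mask above.
import numpy as np
import pytest
from nibabel import Nifti1Image
from nilearn.maskers import NiftiMasker

affine = np.eye(4)
empty_mask = Nifti1Image(np.zeros((8, 8, 8), dtype=np.int8), affine)
img = Nifti1Image(np.random.default_rng(0).random((8, 8, 8)), affine)

with pytest.raises(ValueError, match="empty"):
    NiftiMasker(mask_img=empty_mask).fit(img)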

1664def check_masker_fit_with_non_finite_in_mask(estimator): 

1665 """Check that a mask with non-finite values can be used with maskers.

1666 

1667 - A warning is thrown.

1668 - Output of transform must contain only finite values. 

1669 """ 

1670 if accept_niimg_input(estimator): 

1671 # _shape_3d_large() is used, 

1672 # this test would fail for RegionExtractor otherwise 

1673 mask = np.ones(_shape_3d_large()) 

1674 mask[:, :, 7] = np.nan 

1675 mask[:, :, 4] = np.inf 

1676 mask_img = Nifti1Image(mask, affine=_affine_eye()) 

1677 

1678 imgs = _img_3d_rand() 

1679 

1680 else: 

1681 mask_img = _make_surface_mask() 

1682 for k, v in mask_img.data.parts.items(): 

1683 mask_img.data.parts[k] = np.zeros(v.shape) 

1684 mask_img.data.parts["left"][0:3, 0] = [np.nan, np.inf, 1] 

1685 mask_img.data.parts["right"][0:3, 0] = [np.nan, np.inf, 1] 

1686 

1687 imgs = _make_surface_img(1) 

1688 

1689 estimator.mask_img = mask_img 

1690 with pytest.warns(UserWarning, match="Non-finite values detected."): 

1691 estimator.fit() 

1692 

1693 signal = estimator.transform(imgs) 

1694 assert np.all(np.isfinite(signal)) 

1695 

1696 

1697def check_masker_dtypes(estimator): 

1698 """Check masker can fit/transform with inputs of varying dtypes. 

1699 

1700 Replacement for sklearn check_estimators_dtypes. 

1701 

1702 np.int64 not tested: see no_int64_nifti in nilearn/conftest.py 

1703 """ 

1704 length = 20 

1705 for dtype in [np.float32, np.float64, np.int32]: 

1706 estimator = clone(estimator) 

1707 

1708 if accept_niimg_input(estimator): 

1709 data = np.zeros((*_shape_3d_large(), length)) 

1710 data[1:28, 1:28, 1:28, ...] = ( 

1711 _rng().random((27, 27, 27, length)) + 2.0 

1712 ) 

1713 imgs = Nifti1Image(data.astype(dtype), affine=_affine_eye()) 

1714 

1715 else: 

1716 imgs = _make_surface_img(length) 

1717 for k, v in imgs.data.parts.items(): 

1718 imgs.data.parts[k] = v.astype(dtype) 

1719 

1720 estimator.fit(imgs) 

1721 estimator.transform(imgs) 

1722 

1723 

1724def check_masker_smooth(estimator): 

1725 """Check that masker can smooth data when extracting. 

1726 

1727 Check that the masker instance has a smoothing_fwhm attribute.

1728 Check that output is different with and without smoothing. 

1729 

1730 For Surface maskers: 

1731 - Check that smoothing on surface maskers raises a 'not yet supported' warning.

1732 - Check that output is the same with and without smoothing. 

1733 TODO: update once smoothing is implemented. 

1734 """ 

1735 assert hasattr(estimator, "smoothing_fwhm") 

1736 

1737 if accept_niimg_input(estimator): 

1738 imgs = _img_3d_rand() 

1739 else: 

1740 n_sample = 1 

1741 imgs = _make_surface_img(n_sample) 

1742 

1743 signal = estimator.fit_transform(imgs) 

1744 

1745 estimator.smoothing_fwhm = 3 

1746 estimator.fit(imgs) 

1747 

1748 if accept_niimg_input(estimator): 

1749 smoothed_signal = estimator.transform(imgs) 

1750 

1751 assert_raises( 

1752 AssertionError, assert_array_equal, smoothed_signal, signal 

1753 ) 

1754 

1755 else: 

1756 with pytest.warns(UserWarning, match="not yet supported"): 

1757 smoothed_signal = estimator.transform(imgs) 

1758 

1759 assert_array_equal(smoothed_signal, signal) 

1760 

1761 
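# --- Editor's note: illustrative sketch, not part of estimator_checks.py ---
# For volumetric maskers, setting ``smoothing_fwhm`` should change the
# extracted signal; a sketch assuming NiftiMasker with an explicit mask.
import numpy as np
from nibabel import Nifti1Image
from nilearn.maskers import NiftiMasker

affine = np.eye(4)
img = Nifti1Image(np.random.default_rng(0).random((8, 9, 10)), affine)
mask = Nifti1Image(np.ones((8, 9, 10), dtype=np.int8), affine)

plain = NiftiMasker(mask_img=mask).fit_transform(img)
smoothed = NiftiMasker(mask_img=mask, smoothing_fwhm=6).fit_transform(img)
assert not np.array_equal(plain, smoothed)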

1762def check_masker_inverse_transform(estimator) -> None: 

1763 """Check output of inverse_transform. 

1764 

1765 For signal with 1 or more samples. 

1766 

1767 For nifti maskers: 

1768 - 1D arrays -> 3D images 

1769 - 2D arrays -> 4D images 

1770 

1771 For surface maskers: 

1772 - 1D arrays -> 1D images 

1773 - 2D arrays -> 2D images 

1774 

1775 Check that running transform() is not required to run inverse_transform(). 

1776 

1777 Check that running inverse_transform() before and after running transform() 

1778 gives the same result.

1779 

1780 Check that the proper error is thrown if the signal has the wrong shape.

1781 """ 

1782 if accept_niimg_input(estimator): 

1783 # using different shape for imgs, mask 

1784 # to force resampling 

1785 input_shape = (28, 29, 30) 

1786 imgs = Nifti1Image(_rng().random(input_shape), _affine_eye()) 

1787 

1788 mask_shape = (15, 16, 17) 

1789 mask_img = Nifti1Image(np.ones(mask_shape), _affine_eye()) 

1790 

1791 if isinstance(estimator, NiftiSpheresMasker): 

1792 tmp = mask_img.shape 

1793 else: 

1794 tmp = input_shape 

1795 expected_shapes = [tmp, (*tmp, 1), (*tmp, 10)] 

1796 

1797 else: 

1798 imgs = _make_surface_img(1) 

1799 

1800 mask_img = _make_surface_mask() 

1801 

1802 expected_shapes = [ 

1803 (imgs.shape[0],), 

1804 (imgs.shape[0], 1), 

1805 (imgs.shape[0], 10), 

1806 ] 

1807 

1808 for i, expected_shape in enumerate( 

1809 expected_shapes, 

1810 ): 

1811 estimator = clone(estimator) 

1812 

1813 if isinstance(estimator, NiftiSpheresMasker):

1814 estimator.mask_img = mask_img 

1815 

1816 estimator.fit(imgs) 

1817 

1818 if i == 0: 

1819 signals = _rng().random((estimator.n_elements_,)) 

1820 elif i == 1: 

1821 signals = _rng().random((1, estimator.n_elements_)) 

1822 elif i == 2: 

1823 signals = _rng().random((10, estimator.n_elements_)) 

1824 

1825 new_imgs = estimator.inverse_transform(signals) 

1826 

1827 if accept_niimg_input(estimator): 

1828 actual_shape = new_imgs.shape 

1829 assert_array_almost_equal(imgs.affine, new_imgs.affine) 

1830 else: 

1831 actual_shape = new_imgs.data.shape 

1832 assert actual_shape == expected_shape 

1833 

1834 # same result before and after running transform() 

1835 estimator.transform(imgs) 

1836 

1837 new_imgs_2 = estimator.inverse_transform(signals) 

1838 

1839 if accept_niimg_input(estimator): 

1840 assert check_imgs_equal(new_imgs, new_imgs_2) 

1841 else: 

1842 assert_surface_image_equal(new_imgs, new_imgs_2) 

1843 

1844 signals = _rng().random((1, estimator.n_elements_ + 1)) 

1845 with pytest.raises( 

1846 ValueError, match="Input to 'inverse_transform' has wrong shape." 

1847 ): 

1848 estimator.inverse_transform(signals) 

1849 

1850 
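# --- Editor's note: illustrative sketch, not part of estimator_checks.py ---
# The shapes documented above, sketched for a NiftiMasker (assumption):
# a 1D signal inverts to a 3D image, a 2D (n_samples, n_features) signal
# inverts to a 4D image, and transform() is not needed beforehand.
import numpy as np
from nibabel import Nifti1Image
from nilearn.maskers import NiftiMasker

mask = Nifti1Image(np.ones((6, 7, 8), dtype=np.int8), np.eye(4))
masker = NiftiMasker(mask_img=mask).fit()

img_3d = masker.inverse_transform(np.zeros(masker.n_elements_))
img_4d = masker.inverse_transform(np.zeros((10, masker.n_elements_)))
assert img_3d.shape == (6, 7, 8)
assert img_4d.shape == (6, 7, 8, 10)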

1851def check_masker_transform_resampling(estimator) -> None: 

1852 """Check transform / inverse_transform for maskers with resampling. 

1853 

1854 Similar to check_masker_inverse_transform 

1855 but for nifti masker that can do some resampling 

1856 (labels and maps maskers). 

1857 

1858 Check that output has the shape of the data or the labels/maps image 

1859 depending on which resampling_target was requested at init. 

1860 

1861 Check that using a mask does not affect shape of output. 

1862 

1863 Check that running transform() is not required to run inverse_transform(). 

1864 

1865 Check that running inverse_transform() before and after running transform() 

1866 gives the same result.

1867 

1868 Check that running transform on images with different fov 

1869 than those used at fit is possible. 

1870 """ 

1871 if not hasattr(estimator, "resampling_target"): 

1872 return None 

1873 

1874 # using different shape for imgs, mask 

1875 # to force resampling 

1876 n_sample = 10 

1877 input_shape = (28, 29, 30, n_sample) 

1878 imgs = Nifti1Image(_rng().random(input_shape), _affine_eye()) 

1879 

1880 imgs2 = Nifti1Image(_rng().random((20, 21, 22)), _affine_eye()) 

1881 

1882 mask_shape = (15, 16, 17) 

1883 mask_img = Nifti1Image(np.ones(mask_shape), _affine_eye()) 

1884 

1885 for resampling_target in ["data", "labels"]: 

1886 expected_shape = input_shape 

1887 if resampling_target == "labels": 

1888 if isinstance(estimator, NiftiMapsMasker): 

1889 expected_shape = (*estimator.maps_img.shape[:3], n_sample) 

1890 resampling_target = "maps" 

1891 else: 

1892 expected_shape = (*estimator.labels_img.shape, n_sample) 

1893 

1894 for mask in [None, mask_img]: 

1895 estimator = clone(estimator) 

1896 estimator.resampling_target = resampling_target 

1897 estimator.mask_img = mask 

1898 

1899 # no resampling warning at fit time 

1900 with warnings.catch_warnings(record=True) as warning_list: 

1901 estimator.fit(imgs) 

1902 assert all( 

1903 "at transform time" not in str(x.message) for x in warning_list 

1904 ) 

1905 

1906 signals = _rng().random((n_sample, estimator.n_elements_)) 

1907 

1908 new_imgs = estimator.inverse_transform(signals) 

1909 

1910 assert_array_almost_equal(imgs.affine, new_imgs.affine) 

1911 actual_shape = new_imgs.shape 

1912 assert actual_shape == expected_shape 

1913 

1914 # no resampling warning when using same imgs as for fit() 

1915 with warnings.catch_warnings(record=True) as warning_list: 

1916 estimator.transform(imgs) 

1917 assert all( 

1918 "at transform time" not in str(x.message) for x in warning_list 

1919 ) 

1920 

1921 # same result before and after running transform() 

1922 new_imgs_2 = estimator.inverse_transform(signals) 

1923 

1924 assert check_imgs_equal(new_imgs, new_imgs_2) 

1925 

1926 # no error transforming an image with different fov 

1927 # than the one used at fit time, 

1928 # but there should be a resampling warning 

1929 # we are resampling to data 

1930 with warnings.catch_warnings(record=True) as warning_list: 

1931 estimator.transform(imgs2) 

1932 if resampling_target == "data": 

1933 assert any( 

1934 "at transform time" in str(x.message) for x in warning_list 

1935 ) 

1936 else: 

1937 assert all( 

1938 "at transform time" not in str(x.message) 

1939 for x in warning_list 

1940 ) 

1941 

1942 
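# --- Editor's note: illustrative sketch, not part of estimator_checks.py ---
# With a labels masker, ``resampling_target`` decides whether images from
# inverse_transform live on the grid of the data or of the labels image.
# A hedged sketch assuming NiftiLabelsMasker behaves as the check above
# expects for resampling_target="labels".
import numpy as np
from nibabel import Nifti1Image
from nilearn.maskers import NiftiLabelsMasker

affine = np.eye(4)
labels = np.zeros((10, 11, 12), dtype=np.int8)
labels[2:5, 2:5, 2:5] = 1
labels[6:9, 6:9, 6:9] = 2
labels_img = Nifti1Image(labels, affine)
imgs = Nifti1Image(np.random.default_rng(0).random((14, 15, 16, 5)), affine)

masker = NiftiLabelsMasker(labels_img=labels_img, resampling_target="labels")
signals = masker.fit_transform(imgs)  # imgs resampled onto the labels grid
back = masker.inverse_transform(signals)
assert back.shape == (*labels_img.shape, 5)  # labels grid, 5 time points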

1943def check_masker_fit_score_takes_y(estimator): 

1944 """Replace sklearn check_fit_score_takes_y for maskers. 

1945 

1946 Check that all estimators accept an optional y 

1947 in fit and score so they can be used in pipelines. 

1948 """ 

1949 for attr in ["fit", "fit_transform"]: 

1950 tmp = { 

1951 k: v.default 

1952 for k, v in inspect.signature( 

1953 getattr(estimator, attr) 

1954 ).parameters.items() 

1955 if v.default is not inspect.Parameter.empty 

1956 } 

1957 if "y" not in tmp: 

1958 raise ValueError( 

1959 f"{estimator.__class__.__name__} " 

1960 f"is missing 'y=None' for the method '{attr}'." 

1961 ) 

1962 assert tmp["y"] is None 

1963 

1964 
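# --- Editor's note: illustrative sketch, not part of estimator_checks.py ---
# What the introspection above enforces, shown for one masker (assuming
# NiftiMasker exposes ``y=None`` in fit, as the check requires):
import inspect
from nilearn.maskers import NiftiMasker

params = inspect.signature(NiftiMasker.fit).parameters
assert "y" in params and params["y"].default is None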

1965# ------------------ SURFACE MASKER CHECKS ------------------ 

1966 

1967 

1968def check_surface_masker_fit_with_mask(estimator): 

1969 """Check fit / transform with mask provided at init. 

1970 

1971 Check with 2D and 1D images. 

1972 

1973 1D image -> 1D array 

1974 2D image -> 2D array 

1975 

1976 Also check 'shape' errors between the images to fit and the mask.

1977 """ 

1978 mask_img = _make_surface_mask() 

1979 

1980 # 1D image 

1981 mesh = _make_mesh() 

1982 data = {} 

1983 for k, v in mesh.parts.items(): 

1984 data_shape = (v.n_vertices,) 

1985 data[k] = _rng().random(data_shape) 

1986 imgs = SurfaceImage(mesh, data) 

1987 assert imgs.shape == (9,) 

1988 estimator.fit(imgs) 

1989 

1990 signal = estimator.transform(imgs) 

1991 

1992 assert isinstance(signal, np.ndarray) 

1993 assert signal.shape == (estimator.n_elements_,) 

1994 

1995 # 2D image with 1 sample 

1996 imgs = _make_surface_img(1) 

1997 estimator.mask_img = mask_img 

1998 estimator.fit(imgs) 

1999 

2000 signal = estimator.transform(imgs) 

2001 

2002 assert isinstance(signal, np.ndarray) 

2003 assert signal.shape == (1, estimator.n_elements_) 

2004 

2005 # 2D image with several samples 

2006 imgs = _make_surface_img(5) 

2007 estimator = clone(estimator) 

2008 estimator.mask_img = mask_img 

2009 estimator.fit(imgs) 

2010 

2011 signal = estimator.transform(imgs) 

2012 

2013 assert isinstance(signal, np.ndarray) 

2014 assert signal.shape == (5, estimator.n_elements_) 

2015 

2016 # errors 

2017 with pytest.raises( 

2018 MeshDimensionError, 

2019 match="Number of vertices do not match for between meshes.", 

2020 ): 

2021 estimator.fit(_flip_surf_img(imgs)) 

2022 with pytest.raises( 

2023 MeshDimensionError, 

2024 match="Number of vertices do not match for between meshes.", 

2025 ): 

2026 estimator.transform(_flip_surf_img(imgs)) 

2027 

2028 with pytest.raises( 

2029 MeshDimensionError, match="PolyMeshes do not have the same keys." 

2030 ): 

2031 estimator.fit(_drop_surf_img_part(imgs)) 

2032 with pytest.raises( 

2033 MeshDimensionError, match="PolyMeshes do not have the same keys." 

2034 ): 

2035 estimator.transform(_drop_surf_img_part(imgs)) 

2036 

2037 
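# --- Editor's note: illustrative sketch, not part of estimator_checks.py ---
# The 2D case above, sketched with the same test helpers this module
# imports, assuming SurfaceMasker is one of the surface maskers covered:
from nilearn.conftest import _make_surface_img, _make_surface_mask
from nilearn.maskers import SurfaceMasker

masker = SurfaceMasker(mask_img=_make_surface_mask())
signal = masker.fit_transform(_make_surface_img(5))
assert signal.shape == (5, masker.n_elements_)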

2038def check_surface_masker_list_surf_images(estimator): 

2039 """Test transform / inverse_transform on list of surface images. 

2040 

2041 Check that a 1D or a 2D mask works.

2042 

2043 transform 

2044 - list of 1D -> 2D array 

2045 - list of 2D -> 2D array 

2046 """ 

2047 n_sample = 5 

2048 images_to_transform = [ 

2049 [_make_surface_img()] * n_sample,

2050 [_make_surface_img(2), _make_surface_img(3)], 

2051 ] 

2052 for imgs in images_to_transform: 

2053 for mask_img in [None, _surf_mask_1d(), _make_surface_mask()]: 

2054 estimator.mask_img = mask_img 

2055 

2056 estimator = estimator.fit(imgs) 

2057 

2058 signals = estimator.transform(imgs) 

2059 

2060 assert signals.shape == (n_sample, estimator.n_elements_) 

2061 

2062 img = estimator.inverse_transform(signals) 

2063 

2064 assert img.shape == (_make_surface_img().mesh.n_vertices, n_sample) 

2065 

2066 

2067# ------------------ NIFTI MASKER CHECKS ------------------ 

2068 

2069 

2070def check_nifti_masker_fit_transform(estimator): 

2071 """Run several checks on maskers. 

2072 

2073 - can fit 3D / 4D image 

2074 - fitted maskers can transform: 

2075 - 3D image 

2076 - list of 3D images with same affine 

2077 - array from transformed 3D images should be 1D

2078 - array from transformed 4D images should be 2D

2079 """ 

2080 estimator.fit(_img_3d_rand()) 

2081 

2082 # 3D images 

2083 signal = estimator.transform(_img_3d_rand()) 

2084 

2085 assert isinstance(signal, np.ndarray) 

2086 assert signal.shape == (estimator.n_elements_,) 

2087 

2088 signal_2 = estimator.fit_transform(_img_3d_rand()) 

2089 

2090 assert_array_equal(signal, signal_2) 

2091 

2092 # list of 3D images 

2093 signal = estimator.transform([_img_3d_rand(), _img_3d_rand()]) 

2094 

2095 if is_multimasker(estimator): 

2096 assert isinstance(signal, list) 

2097 assert len(signal) == 2 

2098 for x in signal: 

2099 assert isinstance(x, np.ndarray) 

2100 assert x.ndim == 1 

2101 assert x.shape == (estimator.n_elements_,) 

2102 else: 

2103 assert isinstance(signal, np.ndarray) 

2104 assert signal.ndim == 2 

2105 assert signal.shape[1] == estimator.n_elements_ 

2106 

2107 # 4D images 

2108 signal = estimator.transform(_img_4d_rand_eye()) 

2109 

2110 assert isinstance(signal, np.ndarray) 

2111 assert signal.ndim == 2 

2112 assert signal.shape == (_img_4d_rand_eye().shape[3], estimator.n_elements_) 

2113 

2114 
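# --- Editor's note: illustrative sketch, not part of estimator_checks.py ---
# Output dimensionality summarised above, sketched for NiftiMasker
# (assumption): 3D input -> 1D signal, 4D input -> 2D signal.
import numpy as np
from nibabel import Nifti1Image
from nilearn.maskers import NiftiMasker

affine = np.eye(4)
rng = np.random.default_rng(0)
mask = Nifti1Image(np.ones((5, 6, 7), dtype=np.int8), affine)
masker = NiftiMasker(mask_img=mask).fit()

sig_3d = masker.transform(Nifti1Image(rng.random((5, 6, 7)), affine))
sig_4d = masker.transform(Nifti1Image(rng.random((5, 6, 7, 4)), affine))
assert sig_3d.ndim == 1 and sig_4d.ndim == 2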

2115def check_nifti_masker_fit_transform_5d(estimator): 

2116 """Run checks on nifti maskers for transforming 5D images. 

2117 

2118 - a multi masker should be fine

2119 and return a list of 2D numpy arrays 

2120 - a non-multimasker should fail

2121 """ 

2122 n_subject = 3 

2123 

2124 estimator.fit(_img_3d_rand()) 

2125 

2126 input_5d_img = [_img_4d_rand_eye() for _ in range(n_subject)] 

2127 

2128 if not is_multimasker(estimator): 

2129 with pytest.raises( 

2130 DimensionError, 

2131 match="Input data has incompatible dimensionality: " 

2132 "Expected dimension is 4D and you provided " 

2133 "a list of 4D images \\(5D\\).", 

2134 ): 

2135 estimator.transform(input_5d_img) 

2136 

2137 with pytest.raises( 

2138 DimensionError, 

2139 match="Input data has incompatible dimensionality: " 

2140 "Expected dimension is 4D and you provided " 

2141 "a list of 4D images \\(5D\\).", 

2142 ): 

2143 estimator.fit_transform(input_5d_img) 

2144 

2145 else: 

2146 signal = estimator.transform(input_5d_img) 

2147 

2148 assert isinstance(signal, list) 

2149 assert all(isinstance(x, np.ndarray) for x in signal) 

2150 assert len(signal) == n_subject 

2151 assert all(x.ndim == 2 for x in signal) 

2152 

2153 signal = estimator.fit_transform(input_5d_img) 

2154 

2155 assert isinstance(signal, list) 

2156 assert all(isinstance(x, np.ndarray) for x in signal) 

2157 assert len(signal) == n_subject 

2158 assert all(x.ndim == 2 for x in signal) 

2159 

2160 

2161def check_nifti_masker_clean_error(estimator): 

2162 """Nifti maskers cannot be given cleaning parameters \ 

2163 via both clean_args and kwargs simultaneously. 

2164 

2165 TODO remove after nilearn 0.13.0 

2166 """ 

2167 input_img = _img_4d_rand_eye_medium() 

2168 

2169 estimator.t_r = 2.0 

2170 estimator.high_pass = 1 / 128 

2171 estimator.clean_kwargs = {"clean__filter": "cosine"} 

2172 estimator.clean_args = {"filter": "cosine"} 

2173 

2174 error_msg = ( 

2175 "Passing arguments via 'kwargs' " 

2176 "is mutually exclusive with using 'clean_args'" 

2177 ) 

2178 with pytest.raises(ValueError, match=error_msg): 

2179 estimator.fit(input_img) 

2180 

2181 

2182def check_nifti_masker_clean_warning(estimator): 

2183 """Nifti maskers raise a warning if cleaning parameters are \

2184 passed via kwargs. 

2185 

2186 The parameters passed this way still affect the transformed signal.

2187 

2188 TODO remove after nilearn 0.13.0 

2189 """ 

2190 input_img = _img_4d_rand_eye_medium() 

2191 

2192 signal = estimator.fit_transform(input_img) 

2193 

2194 estimator.t_r = 2.0 

2195 estimator.high_pass = 1 / 128 

2196 estimator.clean_kwargs = {"clean__filter": "cosine"} 

2197 

2198 with pytest.warns(DeprecationWarning, match="You passed some kwargs"): 

2199 estimator.fit(input_img) 

2200 

2201 detrended_signal = estimator.transform(input_img) 

2202 

2203 assert_raises(AssertionError, assert_array_equal, detrended_signal, signal) 

2204 

2205 

2206def check_nifti_masker_fit_transform_files(estimator): 

2207 """Check that nifti maskers can work directly on files.""" 

2208 with TemporaryDirectory() as tmp_dir: 

2209 filename = write_imgs_to_path( 

2210 _img_3d_rand(), 

2211 file_path=Path(tmp_dir), 

2212 create_files=True, 

2213 ) 

2214 

2215 estimator.fit(filename) 

2216 estimator.transform(filename) 

2217 estimator.fit_transform(filename) 

2218 

2219 
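# --- Editor's note: illustrative sketch, not part of estimator_checks.py ---
# Maskers accept filenames as well as in-memory images; a hedged sketch
# assuming NiftiMasker and a temporary .nii.gz file.
from pathlib import Path
from tempfile import TemporaryDirectory

import nibabel as nib
import numpy as np
from nilearn.maskers import NiftiMasker

affine = np.eye(4)
img = nib.Nifti1Image(np.random.default_rng(0).random((5, 6, 7, 4)), affine)
mask = nib.Nifti1Image(np.ones((5, 6, 7), dtype=np.int8), affine)

with TemporaryDirectory() as tmp_dir:
    filename = str(Path(tmp_dir) / "img.nii.gz")
    nib.save(img, filename)
    signal = NiftiMasker(mask_img=mask).fit_transform(filename)
assert signal.shape == (4, 5 * 6 * 7)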

2220def check_nifti_masker_dtype(estimator): 

2221 """Check dtype of output of maskers.""" 

2222 data_32 = _rng().random(_shape_3d_default(), dtype=np.float32) 

2223 affine_32 = np.eye(4, dtype=np.float32) 

2224 img_32 = Nifti1Image(data_32, affine_32) 

2225 

2226 data_64 = _rng().random(_shape_3d_default(), dtype=np.float64) 

2227 affine_64 = np.eye(4, dtype=np.float64) 

2228 img_64 = Nifti1Image(data_64, affine_64) 

2229 

2230 for img in [img_32, img_64]: 

2231 estimator = clone(estimator) 

2232 estimator.dtype = "auto" 

2233 assert estimator.fit_transform(img).dtype == np.float32 

2234 

2235 for img in [img_32, img_64]: 

2236 estimator = clone(estimator) 

2237 estimator.dtype = "float64" 

2238 assert estimator.fit_transform(img).dtype == np.float64 

2239 

2240 

2241def check_nifti_masker_fit_with_3d_mask(estimator): 

2242 """Check that a 3D mask can be used with nifti maskers.

2243 

2244 The mask can have a different shape than the fitted image.

2245 """ 

2246 # _shape_3d_large() is used 

2247 # this test would fail for RegionExtractor otherwise 

2248 mask = np.ones(_shape_3d_large()) 

2249 mask_img = Nifti1Image(mask, affine=_affine_eye()) 

2250 

2251 estimator.mask_img = mask_img 

2252 

2253 assert not hasattr(estimator, "mask_img_") 

2254 

2255 estimator.fit([_img_3d_rand()]) 

2256 

2257 assert hasattr(estimator, "mask_img_") 

2258 

2259 

2260# ------------------ MULTI NIFTI MASKER CHECKS ------------------ 

2261 

2262 

2263def check_multi_masker_with_confounds(estimator): 

2264 """Test multi maskers with a list of confounds. 

2265 

2266 Ensure results are different from when not using confounds.

2267 

2268 Check that confounds are applied when passing a 4D image (not iterable) 

2269 to transform. 

2270 

2271 Check that an error is raised if the number of confounds

2272 does not match the number of images.

2273 """ 

2274 length = _img_4d_rand_eye_medium().shape[3] 

2275 

2276 array = _rng().random((length, 3)) 

2277 

2278 signals_list_1 = estimator.fit_transform( 

2279 [_img_4d_rand_eye_medium(), _img_4d_rand_eye_medium()], 

2280 ) 

2281 signals_list_2 = estimator.fit_transform( 

2282 [_img_4d_rand_eye_medium(), _img_4d_rand_eye_medium()], 

2283 confounds=[array, array], 

2284 ) 

2285 

2286 for signal_1, signal_2 in zip(signals_list_1, signals_list_2): 

2287 assert_raises(AssertionError, assert_array_equal, signal_1, signal_2) 

2288 

2289 # should also work with a single 4D image (has no __iter__ ) 

2290 signals_list_1 = estimator.fit_transform(_img_4d_rand_eye_medium()) 

2291 signals_list_2 = estimator.fit_transform( 

2292 _img_4d_rand_eye_medium(), 

2293 confounds=[array], 

2294 ) 

2295 for signal_1, signal_2 in zip(signals_list_1, signals_list_2): 

2296 assert_raises(AssertionError, assert_array_equal, signal_1, signal_2) 

2297 

2298 # Mismatch n imgs and n confounds 

2299 with pytest.raises( 

2300 ValueError, match="number of confounds .* unequal to number of images" 

2301 ): 

2302 estimator.fit_transform( 

2303 [_img_4d_rand_eye_medium(), _img_4d_rand_eye_medium()], 

2304 confounds=[array], 

2305 ) 

2306 

2307 with pytest.raises( 

2308 TypeError, match="'confounds' must be a None or a list." 

2309 ): 

2310 estimator.fit_transform( 

2311 [_img_4d_rand_eye_medium(), _img_4d_rand_eye_medium()], 

2312 confounds=1, 

2313 ) 

2314 

2315 
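# --- Editor's note: illustrative sketch, not part of estimator_checks.py ---
# Multi maskers take one confounds entry per subject image; a hedged sketch
# assuming MultiNiftiMasker is one of the multi maskers this check targets.
import numpy as np
from nibabel import Nifti1Image
from nilearn.maskers import MultiNiftiMasker

affine = np.eye(4)
rng = np.random.default_rng(0)
imgs = [Nifti1Image(rng.random((4, 5, 6, 10)), affine) for _ in range(2)]
mask = Nifti1Image(np.ones((4, 5, 6), dtype=np.int8), affine)
confounds = [rng.random((10, 3)), rng.random((10, 3))]

masker = MultiNiftiMasker(mask_img=mask).fit()
signals = masker.transform(imgs, confounds=confounds)
assert isinstance(signals, list) and len(signals) == 2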

2316def check_multi_masker_transformer_sample_mask(estimator): 

2317 """Test multi maskers with a list of "sample_mask". 

2318 

2319 The full "sample_mask" list was directly sent as input to each parallel call of

2320 "transform_single_imgs" instead of iterating over its elements.

2321 See https://github.com/nilearn/nilearn/issues/3967 for more details. 

2322 """ 

2323 length = _img_4d_rand_eye_medium().shape[3] 

2324 

2325 n_scrub1 = 3 

2326 n_scrub2 = 2 

2327 

2328 sample_mask1 = np.arange(length - n_scrub1) 

2329 sample_mask2 = np.arange(length - n_scrub2) 

2330 

2331 signals_list = estimator.fit_transform( 

2332 [_img_4d_rand_eye_medium(), _img_4d_rand_eye_medium()], 

2333 sample_mask=[sample_mask1, sample_mask2], 

2334 ) 

2335 

2336 for ts, n_scrub in zip(signals_list, [n_scrub1, n_scrub2]): 

2337 assert ts.shape[0] == length - n_scrub 

2338 

2339 # should also work with a single 4D image (has no __iter__ ) 

2340 signals_list = estimator.fit_transform( 

2341 _img_4d_rand_eye_medium(), 

2342 sample_mask=[sample_mask1], 

2343 ) 

2344 

2345 assert signals_list.shape[0] == length - n_scrub1 

2346 

2347 with pytest.raises( 

2348 ValueError, 

2349 match="number of sample_mask .* unequal to number of images", 

2350 ): 

2351 estimator.fit_transform( 

2352 [_img_4d_rand_eye_medium(), _img_4d_rand_eye_medium()], 

2353 sample_mask=[sample_mask1], 

2354 ) 

2355 

2356 with pytest.raises( 

2357 TypeError, match="'sample_mask' must be a None or a list." 

2358 ): 

2359 estimator.fit_transform( 

2360 [_img_4d_rand_eye_medium(), _img_4d_rand_eye_medium()], 

2361 sample_mask=1, 

2362 ) 

2363 

2364 
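# --- Editor's note: illustrative sketch, not part of estimator_checks.py ---
# ``sample_mask`` drops (scrubs) volumes before signal extraction; a minimal
# sketch for a single subject, assuming NiftiMasker accepts sample_mask.
import numpy as np
from nibabel import Nifti1Image
from nilearn.maskers import NiftiMasker

affine = np.eye(4)
img = Nifti1Image(np.random.default_rng(0).random((4, 5, 6, 20)), affine)
mask = Nifti1Image(np.ones((4, 5, 6), dtype=np.int8), affine)

keep = np.arange(17)  # scrub the last 3 volumes
signal = NiftiMasker(mask_img=mask).fit(img).transform(img, sample_mask=keep)
assert signal.shape[0] == 17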

2365def check_multi_masker_transformer_high_variance_confounds(estimator): 

2366 """Check high_variance_confounds use in multi maskers with 5D data. 

2367 

2368 Make sure that using high_variance_confounds returns a different result.

2369 

2370 Ensure that high_variance_confounds can be used with regular confounds, 

2371 and that results differ from those obtained with the confounds alone.

2372 """ 

2373 length = 20 

2374 

2375 data = _rng().random((*_shape_3d_default(), length)) 

2376 input_img = Nifti1Image(data, _affine_eye()) 

2377 

2378 estimator.high_variance_confounds = False 

2379 

2380 signal = estimator.fit_transform([input_img, input_img]) 

2381 

2382 estimator = clone(estimator) 

2383 estimator.high_variance_confounds = True 

2384 

2385 signal_hvc = estimator.fit_transform([input_img, input_img]) 

2386 

2387 for s1, s2 in zip(signal, signal_hvc): 

2388 assert_raises(AssertionError, assert_array_equal, s1, s2) 

2389 

2390 with TemporaryDirectory() as tmp_dir: 

2391 array = _rng().random((length, 3)) 

2392 

2393 dataframe = pd.DataFrame(array) 

2394 

2395 tmp_dir = Path(tmp_dir) 

2396 dataframe.to_csv(tmp_dir / "confounds.csv") 

2397 

2398 for c in [array, dataframe, tmp_dir / "confounds.csv"]: 

2399 confounds = [c, c] 

2400 

2401 estimator = clone(estimator) 

2402 estimator.high_variance_confounds = False 

2403 signal_c = estimator.fit_transform( 

2404 [input_img, input_img], confounds=confounds 

2405 ) 

2406 

2407 estimator = clone(estimator) 

2408 estimator.high_variance_confounds = True 

2409 signal_c_hvc = estimator.fit_transform( 

2410 [input_img, input_img], confounds=confounds 

2411 ) 

2412 

2413 for s1, s2 in zip(signal_c, signal_c_hvc): 

2414 assert_raises(AssertionError, assert_array_equal, s1, s2) 

2415 

2416 

2417# ------------------ GLM CHECKS ------------------ 

2418 

2419 

2420def check_glm_empty_data_messages(estimator: BaseEstimator) -> None: 

2421 """Check that empty images are caught properly. 

2422 

2423 Replaces sklearn check_estimators_empty_data_messages. 

2424 

2425 Not implemented for nifti data for performance reasons. 

2426 See : https://github.com/nilearn/nilearn/pull/5293#issuecomment-2977170723 

2427 """ 

2428 imgs, design_matrices = _make_surface_img_and_design() 

2429 

2430 data = { 

2431 part: np.empty(0).reshape((imgs.data.parts[part].shape[0], 0)) 

2432 for part in imgs.data.parts 

2433 } 

2434 imgs = SurfaceImage(imgs.mesh, data) 

2435 

2436 with pytest.raises(ValueError, match="empty"): 

2437 # FirstLevel 

2438 if hasattr(estimator, "hrf_model"): 

2439 estimator.fit(imgs, design_matrices=design_matrices) 

2440 # SecondLevel 

2441 else: 

2442 estimator.fit(imgs, design_matrix=design_matrices) 

2443 

2444 

2445def check_glm_is_fitted(estimator): 

2446 """Check that glm throws the proper error when not fitted."""

2447 with pytest.raises(ValueError, match=_not_fitted_error_message(estimator)): 

2448 estimator.compute_contrast([]) 

2449 

2450 data, design_matrices = _make_surface_img_and_design() 

2451 # FirstLevel 

2452 if hasattr(estimator, "hrf_model"): 

2453 estimator.fit(data, design_matrices=design_matrices) 

2454 # SecondLevel 

2455 else: 

2456 estimator.fit(data, design_matrix=design_matrices) 

2457 

2458 assert estimator.__sklearn_is_fitted__() 

2459 

2460 check_is_fitted(estimator) 

2461 

2462 
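# --- Editor's note: illustrative sketch, not part of estimator_checks.py ---
# Calling compute_contrast before fit should raise, assuming FirstLevelModel
# is among the GLM estimators this check is run on.
import pytest
from nilearn.glm.first_level import FirstLevelModel

with pytest.raises(ValueError):
    FirstLevelModel().compute_contrast([])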

2463def check_glm_dtypes(estimator): 

2464 """Check glm can fit with inputs of varying dtypes. 

2465 

2466 Replacement for sklearn check_estimators_dtypes. 

2467 

2468 np.int64 not tested: see no_int64_nifti in nilearn/conftest.py 

2469 """ 

2470 imgs, design_matrices = _make_surface_img_and_design() 

2471 

2472 for dtype in [np.float32, np.float64, np.int32]: 

2473 estimator = clone(estimator) 

2474 

2475 for k, v in imgs.data.parts.items(): 

2476 imgs.data.parts[k] = v.astype(dtype) 

2477 

2478 # FirstLevel 

2479 if hasattr(estimator, "hrf_model"): 

2480 estimator.fit(imgs, design_matrices=design_matrices) 

2481 # SecondLevel 

2482 else: 

2483 estimator.fit(imgs, design_matrix=design_matrices) 

2484 

2485 

2486# ------------------ REPORT GENERATION CHECKS ------------------ 

2487 

2488 

2489def _generate_report_with_no_warning(estimator): 

2490 """Check that report generation throws no warning.""" 

2491 with warnings.catch_warnings(record=True) as warning_list: 

2492 report = _generate_report(estimator) 

2493 

2494 # TODO 

2495 # RegionExtractor, SurfaceMapsMasker still throw too many warnings

2496 warnings_to_ignore = [ 

2497 # only thrown with older dependencies 

2498 "No contour levels were found within the data range.", 

2499 ] 

2500 unknown_warnings = [ 

2501 str(x.message) 

2502 for x in warning_list 

2503 if str(x.message) not in warnings_to_ignore 

2504 ] 

2505 if not isinstance(estimator, (RegionExtractor, SurfaceMapsMasker)): 

2506 assert not unknown_warnings, unknown_warnings 

2507 

2508 _check_html(report) 

2509 

2510 return report 

2511 

2512 

2513def _generate_report(estimator): 

2514 """Adapt the call to generate_report to limit warnings. 

2515 

2516 For example by only passing the number of displayed maps 

2517 that a map masker contains. 

2518 """ 

2519 if isinstance( 

2520 estimator, 

2521 (NiftiMapsMasker, MultiNiftiMapsMasker, SurfaceMapsMasker), 

2522 ) and hasattr(estimator, "n_elements_"): 

2523 return estimator.generate_report(displayed_maps=estimator.n_elements_) 

2524 else: 

2525 return estimator.generate_report() 

2526 

2527 

2528def check_masker_generate_report(estimator): 

2529 """Check that maskers can generate a report.

2530 

2531 - check that we get a warning: 

2532 - when matplotlib is not installed 

2533 - when generating reports before fit 

2534 - check content of report before fit and after fit 

2535 

2536 """ 

2537 if not is_matplotlib_installed(): 

2538 with warnings.catch_warnings(record=True) as warning_list: 

2539 report = _generate_report(estimator) 

2540 

2541 assert len(warning_list) == 1 

2542 assert issubclass(warning_list[0].category, ImportWarning) 

2543 assert report == [None] 

2544 

2545 return 

2546 

2547 with warnings.catch_warnings(record=True) as warning_list: 

2548 report = _generate_report(estimator) 

2549 assert len(warning_list) == 1 

2550 

2551 _check_html(report, is_fit=False) 

2552 assert "Make sure to run `fit`" in str(report) 

2553 

2554 if accept_niimg_input(estimator): 

2555 input_img = _img_3d_rand() 

2556 else: 

2557 input_img = _make_surface_img(2) 

2558 

2559 estimator.fit(input_img) 

2560 

2561 assert estimator._report_content["warning_message"] is None 

2562 

2563 # TODO 

2564 # SurfaceMapsMasker, RegionExtractor still throws a warning 

2565 report = _generate_report_with_no_warning(estimator) 

2566 report = _generate_report(estimator) 

2567 _check_html(report) 

2568 

2569 with TemporaryDirectory() as tmp_dir: 

2570 report.save_as_html(Path(tmp_dir) / "report.html") 

2571 assert (Path(tmp_dir) / "report.html").is_file() 

2572 

2573 
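# --- Editor's note: illustrative sketch, not part of estimator_checks.py ---
# The report round-trip checked above, assuming NiftiMasker and an
# environment where matplotlib is installed.
from pathlib import Path
from tempfile import TemporaryDirectory

import numpy as np
from nibabel import Nifti1Image
from nilearn.maskers import NiftiMasker

affine = np.eye(4)
img = Nifti1Image(np.random.default_rng(0).random((5, 6, 7)), affine)
mask = Nifti1Image(np.ones((5, 6, 7), dtype=np.int8), affine)

masker = NiftiMasker(mask_img=mask).fit(img)
report = masker.generate_report()
with TemporaryDirectory() as tmp_dir:
    report.save_as_html(Path(tmp_dir) / "report.html")
    assert (Path(tmp_dir) / "report.html").is_file()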

2574def check_nifti_masker_generate_report_after_fit_with_only_mask(estimator): 

2575 """Check that a 3D mask is enough to run fit and generate a report."""

2576 mask = np.ones(_shape_3d_large()) 

2577 mask_img = Nifti1Image(mask, affine=_affine_eye()) 

2578 

2579 estimator.mask_img = mask_img 

2580 

2581 assert not hasattr(estimator, "mask_img_") 

2582 

2583 estimator.fit() 

2584 

2585 assert estimator._report_content["warning_message"] is None 

2586 

2587 if not is_matplotlib_installed(): 

2588 return 

2589 

2590 with pytest.warns(UserWarning, match="No image provided to fit."): 

2591 report = _generate_report(estimator) 

2592 _check_html(report) 

2593 

2594 input_img = _img_4d_rand_eye_medium() 

2595 

2596 estimator.fit(input_img) 

2597 

2598 # TODO 

2599 # NiftiSpheresMasker still throws a warning 

2600 if isinstance(estimator, NiftiSpheresMasker): 

2601 return 

2602 report = _generate_report_with_no_warning(estimator) 

2603 _check_html(report) 

2604 

2605 

2606def check_masker_generate_report_false(estimator): 

2607 """Test with reports set to False.""" 

2608 if not is_matplotlib_installed(): 

2609 return 

2610 

2611 estimator.reports = False 

2612 

2613 if accept_niimg_input(estimator): 

2614 input_img = _img_4d_rand_eye_medium() 

2615 else: 

2616 input_img = _make_surface_img(2) 

2617 

2618 estimator.fit(input_img) 

2619 

2620 assert estimator._reporting_data is None 

2621 assert estimator._reporting() == [None] 

2622 with pytest.warns( 

2623 UserWarning, 

2624 match=("No visual outputs created."), 

2625 ): 

2626 report = _generate_report(estimator) 

2627 

2628 _check_html(report, reports_requested=False) 

2629 

2630 assert "Empty Report" in str(report) 

2631 

2632 

2633def check_multi_nifti_masker_generate_report_4d_fit(estimator): 

2634 """Test that calling generate_report on multiple subjects raises a warning."""

2635 if not is_matplotlib_installed(): 

2636 return 

2637 

2638 estimator.maps_img = _img_3d_ones() 

2639 estimator.fit([_img_4d_rand_eye_medium(), _img_4d_rand_eye_medium()]) 

2640 with pytest.warns( 

2641 UserWarning, match="A list of 4D subject images were provided to fit. " 

2642 ): 

2643 _generate_report(estimator)