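"""Tests for nilearn.interfaces.fmriprep.load_confounds."""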

import re

import numpy as np
import pandas as pd
import pytest
from nibabel import Nifti1Image
from scipy.stats import pearsonr
from sklearn.preprocessing import scale

from nilearn._utils.data_gen import create_fake_bids_dataset
from nilearn._utils.fmriprep_confounds import to_camel_case
from nilearn.conftest import _rng
from nilearn.interfaces.bids import get_bids_files
from nilearn.interfaces.fmriprep import load_confounds
from nilearn.interfaces.fmriprep.load_confounds import (
    _check_strategy,
    _load_single_confounds_file,
)
from nilearn.interfaces.fmriprep.tests._testing import (
    create_tmp_filepath,
    get_legal_confound,
)
from nilearn.maskers import NiftiMasker
from nilearn.tests.test_signal import generate_trends


def _simu_img(tmp_path, trend, demean):
    """Simulate a nifti image based on a confound file, \
    with some parts confounds and some parts noise.
    """
    file_nii, _ = create_tmp_filepath(tmp_path, copy_confounds=True)
    # set the size of the image matrix
    nx = 5
    ny = 5
    # the actual number of slices will be double that,
    # as we will stack slices with confounds on top of slices with noise
    nz = 2
    # Load a simple 6-parameter motion model as confounds
    # demean is set to False just to simulate the signal
    # based on the original state
    confounds, _ = load_confounds(
        file_nii, strategy=("motion",), motion="basic", demean=False
    )

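    # lengthen the confounds before simulating the signal,
    # for numerical stability (see _handle_non_steady)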
    X = _handle_non_steady(confounds)
    X = X.to_numpy()
    # the number of time points is based on the example confound file
    nt = X.shape[0]
    # initialize an empty 4D volume
    vol = np.zeros([nx, ny, 2 * nz, nt])
    vol_conf = np.zeros([nx, ny, 2 * nz])
    vol_rand = np.zeros([nx, ny, 2 * nz])

    # create random noise and a random mixture of confounds standardized
    # to zero mean and unit variance
    rng = _rng()
    beta = rng.random((nx * ny * nz, X.shape[1]))
    tseries_rand = scale(rng.random((nx * ny * nz, nt)), axis=1)
    # create the confound mixture
    tseries_conf = scale(np.matmul(beta, X.transpose()), axis=1)

    # fill the first half of the 4D data with the confound mixture
    vol[:, :, 0:nz, :] = tseries_conf.reshape(nx, ny, nz, nt)
    vol_conf[:, :, 0:nz] = 1

    # fill the second half of the 4D data with random noise
    vol[:, :, range(nz, 2 * nz), :] = tseries_rand.reshape(nx, ny, nz, nt)
    vol_rand[:, :, range(nz, 2 * nz)] = 1

    # Shift the mean to non-zero
    vol = vol + 10

    # add a linear trend to the data
    if trend:
        signal_trend = generate_trends(n_features=nx * ny * 2 * nz, length=nt)
        vol += signal_trend.reshape(nx, ny, 2 * nz, nt)

    # create a nifti image with the data, and the corresponding masks
    img = Nifti1Image(vol, np.eye(4))
    mask_conf = Nifti1Image(vol_conf, np.eye(4))
    mask_rand = Nifti1Image(vol_rand, np.eye(4))

    # generate the associated confounds for testing
    test_confounds, _ = load_confounds(
        file_nii, strategy=("motion",), motion="basic", demean=demean
    )
    # match how we extended the length to increase the degrees of freedom
    test_confounds = _handle_non_steady(test_confounds)
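    # sample mask that drops the first volume,
    # which simulates a non-steady-state volume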
    sample_mask = np.arange(test_confounds.shape[0])[1:]
    return img, mask_conf, mask_rand, test_confounds, sample_mask


def _handle_non_steady(confounds):
    """Simulate non-steady state correctly while increasing the length.

    - The first row is non-steady state;
      replace it with the input from the second row.

    - Repeat X in length (axis=0) 10 times to increase
      the degrees of freedom for numerical stability.

    - Put the non-steady-state volume back at the first sample.
    """
    X = confounds.to_numpy()
    non_steady = X[0, :]
    tmp = np.vstack((X[1, :], X[1:, :]))
    tmp = np.tile(tmp, (10, 1))
    return pd.DataFrame(
        np.vstack((non_steady, tmp[1:, :])), columns=confounds.columns
    )


def _regression(confounds, tmp_path):
    """Perform simple regression with NiftiMasker."""
    # Simulate data
    img, mask_conf, _, _, _ = _simu_img(tmp_path, trend=False, demean=False)
    confounds = _handle_non_steady(confounds)
    # Do the regression
    masker = NiftiMasker(mask_img=mask_conf, standardize=True)
    tseries_clean = masker.fit_transform(
        img, confounds=confounds, sample_mask=None
    )
    assert tseries_clean.shape[0] == confounds.shape[0]


@pytest.mark.parametrize("fmriprep_version", ["1.4.x", "21.x.x"])
@pytest.mark.filterwarnings("ignore")
@pytest.mark.parametrize(
    "test_strategy,param",
    [
        (("motion",), {}),
        (("high_pass",), {}),
        (("wm_csf",), {"wm_csf": "full"}),
        (("global_signal",), {"global_signal": "full"}),
        (("high_pass", "compcor"), {}),
        (("high_pass", "compcor"), {"compcor": "anat_separated"}),
        (("high_pass", "compcor"), {"compcor": "temporal"}),
        (("ica_aroma",), {"ica_aroma": "basic"}),
    ],
)
def test_nilearn_regress(tmp_path, test_strategy, param, fmriprep_version):
    """Try regressing out all motion types without sample mask."""
    img_nii, _ = create_tmp_filepath(
        tmp_path,
        copy_confounds=True,
        copy_json=True,
        fmriprep_version=fmriprep_version,
    )
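    # ICA-AROMA is not checked against the 21.x.x test data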
    if fmriprep_version == "21.x.x" and test_strategy == ("ica_aroma",):
        return
    confounds, _ = load_confounds(img_nii, strategy=test_strategy, **param)
    _regression(confounds, tmp_path)


def _tseries_std(
    img,
    mask_img,
    confounds,
    sample_mask,
    standardize_signal=False,
    standardize_confounds=True,
    detrend=False,
):
    """Get the std of time series in a mask."""
    masker = NiftiMasker(
        mask_img=mask_img,
        standardize=standardize_signal,
        standardize_confounds=standardize_confounds,
        detrend=detrend,
    )
    tseries = masker.fit_transform(
        img, confounds=confounds, sample_mask=sample_mask
    )
    return tseries.std(axis=0)


def _denoise(
    img,
    mask_img,
    confounds,
    sample_mask,
    standardize_signal=False,
    standardize_confounds=True,
    detrend=False,
):
    """Extract time series with and without confounds."""
    masker = NiftiMasker(
        mask_img=mask_img,
        standardize=standardize_signal,
        standardize_confounds=standardize_confounds,
        detrend=detrend,
    )
    tseries_raw = masker.fit_transform(img, sample_mask=sample_mask)
    tseries_clean = masker.fit_transform(
        img, confounds=confounds, sample_mask=sample_mask
    )
    return tseries_raw, tseries_clean


def _corr_tseries(tseries1, tseries2):
    """Compute the correlation between two sets of time series."""
    corr = np.zeros(tseries1.shape[1])
    for ind in range(tseries1.shape[1]):
        corr[ind], _ = pearsonr(tseries1[:, ind], tseries2[:, ind])
    return corr


@pytest.mark.filterwarnings("ignore")
def test_nilearn_standardize_false(tmp_path):
    """Test removing confounds with no standardization."""
    # NiftiMasker defaults:
    # standardize=False, standardize_confounds=True, detrend=False

    # Simulate data; set demean to False as standardize_confounds=True
    (img, mask_conf, mask_rand, confounds, sample_mask) = _simu_img(
        tmp_path, trend=False, demean=False
    )

    # Check that most variance is removed
    # in voxels composed of pure confounds
    tseries_std = _tseries_std(
        img,
        mask_conf,
        confounds,
        sample_mask,
        standardize_signal=False,
        standardize_confounds=True,
        detrend=False,
    )
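    # the mean of a boolean array is the fraction of voxels
    # with near-zero standard deviation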
    assert np.mean(tseries_std < 0.0001)

    # Check that most variance is preserved
    # in voxels composed of random noise
    tseries_std = _tseries_std(
        img,
        mask_rand,
        confounds,
        sample_mask,
        standardize_signal=False,
        standardize_confounds=True,
        detrend=False,
    )
    assert np.mean(tseries_std > 0.9)


@pytest.mark.timeout(0)
@pytest.mark.filterwarnings("ignore")
@pytest.mark.parametrize("standardize_signal", ["zscore", "psc"])
@pytest.mark.parametrize(
    "standardize_confounds,detrend",
    [(True, False), (False, True), (True, True)],
)
def test_nilearn_standardize(
    tmp_path, standardize_signal, standardize_confounds, detrend
):
    """Test confounds removal with logical parameters for processing signal."""
    # demean is set to False to let signal.clean handle everything
    (img, mask_conf, mask_rand, confounds, mask) = _simu_img(
        tmp_path, trend=True, demean=False
    )
    # We now load the time series with vs without confounds
    # in voxels composed of pure confounds.
    # The correlation before and after denoising should be very low,
    # as most of the variance is removed by denoising.
    tseries_raw, tseries_clean = _denoise(
        img,
        mask_conf,
        confounds,
        mask,
        standardize_signal=standardize_signal,
        standardize_confounds=standardize_confounds,
        detrend=detrend,
    )
    corr = _corr_tseries(tseries_raw, tseries_clean)
    assert np.absolute(np.mean(corr)) < 0.2

    # We now load the time series with zscore standardization,
    # with vs without confounds, in voxels where the signal is uncorrelated
    # with the confounds. The correlation before and after denoising should
    # be very high, as very little of the variance is removed by denoising.
    tseries_raw, tseries_clean = _denoise(
        img,
        mask_rand,
        confounds,
        mask,
        standardize_signal=standardize_signal,
        standardize_confounds=standardize_confounds,
        detrend=detrend,
    )
    corr = _corr_tseries(tseries_raw, tseries_clean)
    assert corr.mean() > 0.8


@pytest.mark.parametrize("fmriprep_version", ["1.4.x", "21.x.x"])
def test_confounds2df(tmp_path, fmriprep_version):
    """Check auto-detection of confounds from an fMRI nii image."""
    img_nii, _ = create_tmp_filepath(
        tmp_path, copy_confounds=True, fmriprep_version=fmriprep_version
    )
    confounds, _ = load_confounds(img_nii)
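    # the basic motion parameters should always be detected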
    assert "trans_x" in confounds.columns


@pytest.mark.parametrize("fmriprep_version", ["1.4.x", "21.x.x"])
def test_load_single_confounds_file(tmp_path, fmriprep_version):
    """Check that the load_confounds function returns the same confounds \
    as _load_single_confounds_file.
    """
    nii_file, confounds_file = create_tmp_filepath(
        tmp_path, copy_confounds=True, fmriprep_version=fmriprep_version
    )

    # get defaults from load_confounds
    import inspect

    _defaults = {
        key: value.default
        for key, value in inspect.signature(load_confounds).parameters.items()
    }
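    # img_files and strategy are passed explicitly below,
    # so drop them from the default keyword arguments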
    _defaults.pop("img_files")
    _default_strategy = _defaults.pop("strategy")

    _, confounds = _load_single_confounds_file(
        str(confounds_file), strategy=_default_strategy, **_defaults
    )
    confounds_nii, _ = load_confounds(
        nii_file, strategy=_default_strategy, **_defaults
    )
    pd.testing.assert_frame_equal(confounds, confounds_nii)


@pytest.mark.parametrize(
    "strategy,message",
    [
        (
            ["string"],
            "not a supported type of confounds.",
        ),
        ("error", "tuple or list of strings"),
        ((0,), "not a supported type of confounds."),
        (("compcor",), "high_pass"),
    ],
)
def test_check_strategy(strategy, message):
    """Check that flawed strategy options \
    generate meaningful error messages.
    """
    with pytest.raises(ValueError) as exc_info:
        _check_strategy(strategy=strategy)
    assert message in exc_info.value.args[0]


SUFFIXES = np.array(["", "_derivative1", "_power2", "_derivative1_power2"])


@pytest.fixture
def expected_suffixes(motion):
    """Return the expected suffixes for a given motion strategy."""
    expectation = {
        "basic": slice(1),
        "derivatives": slice(2),
        "power2": np.array([True, False, True, False]),
        "full": slice(4),
    }
    return SUFFIXES[expectation[motion]]


@pytest.mark.parametrize("fmriprep_version", ["1.4.x", "21.x.x"])
@pytest.mark.parametrize("motion", ["basic", "derivatives", "power2", "full"])
@pytest.mark.parametrize(
    "param", ["trans_x", "trans_y", "trans_z", "rot_x", "rot_y", "rot_z"]
)
def test_motion(tmp_path, motion, param, expected_suffixes, fmriprep_version):
    """Check that the expected motion confounds are loaded."""
    img_nii, _ = create_tmp_filepath(
        tmp_path, copy_confounds=True, fmriprep_version=fmriprep_version
    )
    conf, _ = load_confounds(img_nii, strategy=("motion",), motion=motion)
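    # only the suffixes requested by the motion strategy should be loaded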
    for suff in SUFFIXES:
        if suff in expected_suffixes:
            assert f"{param}{suff}" in conf.columns
        else:
            assert f"{param}{suff}" not in conf.columns


@pytest.mark.parametrize(
    "compcor, n_compcor, test_keyword, test_n, fmriprep_version",
    [
        ("anat_combined", 2, "a_comp_cor_", 2, "1.4.x"),
        ("anat_separated", 2, "a_comp_cor_", 4, "1.4.x"),
        ("anat_combined", "all", "a_comp_cor_", 57, "1.4.x"),
        ("temporal", "all", "t_comp_cor_", 6, "1.4.x"),
        ("anat_combined", 2, "a_comp_cor_", 2, "21.x.x"),
        ("anat_separated", "all", "w_comp_cor_", 4, "21.x.x"),
        ("temporal_anat_separated", "all", "c_comp_cor_", 3, "21.x.x"),
        ("temporal", "all", "t_comp_cor_", 3, "21.x.x"),
    ],
)
def test_n_compcor(
    tmp_path, compcor, n_compcor, test_keyword, test_n, fmriprep_version
):
    """Check that the expected number of confounds are loaded."""
    img_nii, _ = create_tmp_filepath(
        tmp_path,
        copy_confounds=True,
        copy_json=True,
        fmriprep_version=fmriprep_version,
    )
    conf, _ = load_confounds(
        img_nii,
        strategy=(
            "high_pass",
            "compcor",
        ),
        compcor=compcor,
        n_compcor=n_compcor,
    )
    assert sum(test_keyword in col for col in conf.columns) == test_n


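# regressors removed from a valid confound file
# to simulate an incomplete fMRIPrep output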
missing_params = ["trans_y", "trans_x_derivative1", "rot_z_power2"]
missing_keywords = ["cosine", "global_signal"]


def _remove_confounds(conf_file):
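    """Remove a set of columns from a confound file to make it invalid."""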

426 legal_confounds = pd.read_csv(conf_file, delimiter="\t", encoding="utf-8") 

427 remove_columns = [] 

428 for missing_kw in missing_keywords: 

429 remove_columns += [ 

430 col_name 

431 for col_name in legal_confounds.columns 

432 if missing_kw in col_name 

433 ] 

434 

435 aroma = [ 

436 col_name for col_name in legal_confounds.columns if "aroma" in col_name 

437 ] 

438 missing_confounds = legal_confounds.drop( 

439 columns=missing_params + remove_columns + aroma 

440 ) 

441 missing_confounds.to_csv(conf_file, sep="\t", index=False) 

442 

443 

444@pytest.mark.parametrize("fmriprep_version", ["1.4.x", "21.x.x"]) 

445def test_not_found_exception(tmp_path, fmriprep_version): 

446 """Check various file or parameter missing scenario.""" 

447 # Create invalid confound file in temporary dir 

448 img_missing_confounds, bad_conf = create_tmp_filepath( 

449 tmp_path, 

450 copy_confounds=True, 

451 copy_json=False, 

452 fmriprep_version=fmriprep_version, 

453 ) 

454 

455 _remove_confounds(bad_conf) 

456 

457 with pytest.raises(ValueError) as exc_info: 

458 load_confounds( 

459 img_missing_confounds, 

460 strategy=( 

461 "high_pass", 

462 "motion", 

463 "global_signal", 

464 ), 

465 global_signal="full", 

466 motion="full", 

467 ) 

468 assert f"{missing_params}" in exc_info.value.args[0] 

469 

470 # missing cosine if it's not present in the file it's fine 

471 assert f"{missing_keywords[-1:]}" in exc_info.value.args[0] 

    # loading anat compcor should also raise an error, because the json
    # file is missing for that example dataset
    with pytest.raises(ValueError):
        load_confounds(
            img_missing_confounds,
            strategy=("high_pass", "compcor"),
            compcor="anat_combined",
        )

    # catch invalid compcor option
    with pytest.raises(KeyError):
        load_confounds(
            img_missing_confounds,
            strategy=("high_pass", "compcor"),
            compcor="blah",
        )


@pytest.mark.parametrize("fmriprep_version", ["1.4.x", "21.x.x"])
def test_not_found_exception_ica_aroma(tmp_path, fmriprep_version):
    """Check various missing file or parameter scenarios for ICA-AROMA."""
    # Create invalid confound file in temporary dir
    img_missing_confounds, bad_conf = create_tmp_filepath(
        tmp_path,
        copy_confounds=True,
        copy_json=False,
        fmriprep_version=fmriprep_version,
    )

    _remove_confounds(bad_conf)

    # Aggressive ICA-AROMA strategy requires
    # the default nifti and noise ICs in the confound file;
    # here the nifti is correct but the noise regressors are missing
    with pytest.raises(ValueError) as exc_info:
        load_confounds(
            img_missing_confounds, strategy=("ica_aroma",), ica_aroma="basic"
        )
    assert "ica_aroma" in exc_info.value.args[0]

    # Aggressive ICA-AROMA strategy on an AROMA-denoised nifti:
    # the default nifti is required, so this is an invalid file type
    aroma_nii, _ = create_tmp_filepath(
        tmp_path,
        image_type="ica_aroma",
        bids_fields={"entities": {"sub": "icaAroma"}},
        fmriprep_version=fmriprep_version,
    )
    with pytest.raises(ValueError) as exc_info:
        load_confounds(aroma_nii, strategy=("ica_aroma",), ica_aroma="basic")
    assert "Invalid file type" in exc_info.value.args[0]

    # Non-aggressive ICA-AROMA strategy requires
    # a desc-smoothAROMAnonaggr nifti file
    with pytest.raises(ValueError) as exc_info:
        load_confounds(
            img_missing_confounds, strategy=("ica_aroma",), ica_aroma="full"
        )
    assert "desc-smoothAROMAnonaggr_bold" in exc_info.value.args[0]

    # no confound file alongside the image file
    (tmp_path / bad_conf).unlink()
    with pytest.raises(ValueError) as exc_info:
        load_confounds(img_missing_confounds)
    assert "Could not find associated confound file." in exc_info.value.args[0]


@pytest.mark.parametrize("fmriprep_version", ["1.4.x", "21.x.x"])
def test_non_steady_state(tmp_path, fmriprep_version):
    """Warn when 'non_steady_state' is in the strategy."""
    # supplying 'non_steady_state' in the strategy is not necessary;
    # check that the warning is correctly raised
    img, _ = create_tmp_filepath(
        tmp_path, copy_confounds=True, fmriprep_version=fmriprep_version
    )
    warning_message = r"Non-steady state"
    with pytest.warns(UserWarning, match=warning_message):
        load_confounds(img, strategy=("non_steady_state", "motion"))


def test_load_non_nifti(tmp_path):
    """Test non-nifti and invalid file types as input."""
    # tsv file - unsupported input
    _, tsv = create_tmp_filepath(tmp_path, copy_confounds=True, copy_json=True)

    with pytest.raises(ValueError):
        load_confounds(str(tsv))

    # cifti file should be supported
    cifti, _ = create_tmp_filepath(
        tmp_path, image_type="cifti", copy_confounds=True, copy_json=True
    )
    conf, _ = load_confounds(cifti)
    assert conf.size != 0

    # gifti file should be supported
    gifti, _ = create_tmp_filepath(
        tmp_path, image_type="gifti", copy_confounds=True, copy_json=True
    )
    conf, _ = load_confounds(gifti)
    assert conf.size != 0


def test_invalid_filetype(tmp_path, rng):
    """Invalid file types/associated files for the load method."""
    bad_nii, bad_conf = create_tmp_filepath(
        tmp_path, copy_confounds=True, fmriprep_version="1.4.x"
    )
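    # sanity check: the valid file loads without error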
    _, _ = load_confounds(bad_nii)

    # more than one legal filename for confounds
    add_conf = "sub-14x_task-test_desc-confounds_regressors.tsv"
    legal_confounds, _ = get_legal_confound()
    legal_confounds.to_csv(tmp_path / add_conf, sep="\t", index=False)
    with pytest.raises(ValueError) as info:
        load_confounds(bad_nii)
    assert "more than one" in str(info.value)
    (tmp_path / add_conf).unlink()  # Remove for the rest of the tests to run

    # invalid fmriprep version: confound file with no header (<1.0)
    fake_confounds = rng.random((30, 20))
    np.savetxt(bad_conf, fake_confounds, delimiter="\t")
    with pytest.raises(ValueError) as error_log:
        load_confounds(bad_nii)
    assert "The confound file contains no header." in str(error_log.value)

    # invalid fmriprep version: old camel case header (<1.2)
    legal_confounds, _ = get_legal_confound()
    camel_confounds = legal_confounds.copy()
    camel_confounds.columns = [
        to_camel_case(col_name) for col_name in legal_confounds.columns
    ]
    camel_confounds.to_csv(bad_conf, sep="\t", index=False)
    with pytest.raises(ValueError) as error_log:
        load_confounds(bad_nii)
    assert "contains header in camel case." in str(error_log.value)

    # create an empty nifti file with no associated confound file;
    # we only need the path to check this
    no_conf = "no_confound_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz"
    no_confound = tmp_path / no_conf
    no_confound.touch()
    with pytest.raises(ValueError):
        load_confounds(bad_nii)


@pytest.mark.parametrize("fmriprep_version", ["1.4.x"])
def test_ica_aroma(tmp_path, fmriprep_version):
    """Test ICA-AROMA related file input."""
    aroma_nii, _ = create_tmp_filepath(
        tmp_path,
        image_type="ica_aroma",
        copy_confounds=True,
        fmriprep_version=fmriprep_version,
    )
    regular_nii, _ = create_tmp_filepath(
        tmp_path,
        image_type="regular",
        copy_confounds=True,
        fmriprep_version=fmriprep_version,
    )
    # Aggressive strategy
    conf, _ = load_confounds(
        regular_nii, strategy=("ica_aroma",), ica_aroma="basic"
    )
    for col_name in conf.columns:
        # only aroma and non-steady state columns will be present
        assert re.match(r"(?:aroma_motion_+|non_steady_state+)", col_name)

    # Non-aggressive strategy
    conf, _ = load_confounds(
        aroma_nii, strategy=("ica_aroma",), ica_aroma="full"
    )
    assert conf.size == 0

    # invalid combination of strategy and option
    with pytest.raises(ValueError) as exc_info:
        conf, _ = load_confounds(
            regular_nii, strategy=("ica_aroma",), ica_aroma="invalid"
        )
    assert "Current input: invalid" in exc_info.value.args[0]


@pytest.mark.parametrize(
    "fmriprep_version, scrubbed_time_points, non_steady_outliers",
    [("1.4.x", 8, 1), ("21.x.x", 30, 3)],
)
@pytest.mark.filterwarnings("ignore::RuntimeWarning")
def test_sample_mask(
    tmp_path, fmriprep_version, scrubbed_time_points, non_steady_outliers
):
    """Test load method and sample mask."""
    regular_nii, regular_conf = create_tmp_filepath(
        tmp_path,
        image_type="regular",
        copy_confounds=True,
        fmriprep_version=fmriprep_version,
    )

    reg, mask = load_confounds(
        regular_nii, strategy=("motion", "scrub"), scrub=5, fd_threshold=0.15
    )
    # the "1.4.x" test data has 6 time points marked as motion outliers,
    # and one non-steady state volume (overlapping the first motion outlier);
    # 2 more time points are removed by the "full" scrubbing strategy
    # (remove segments shorter than 5 volumes)
    assert reg.shape[0] - len(mask) == scrubbed_time_points

    # nilearn requires unmasked confound regressors
    assert reg.shape[0] == 30

    # non-steady state volumes will always be removed
    reg, mask = load_confounds(regular_nii, strategy=("motion",))
    assert reg.shape[0] - len(mask) == non_steady_outliers

    # When no non-steady state volumes are present
    conf_data, _ = get_legal_confound(non_steady_state=False)
    conf_data.to_csv(regular_conf, sep="\t", index=False)  # save to tmp
    reg, mask = load_confounds(regular_nii, strategy=("motion",))
    assert mask is None

    # When no volumes need removing (very liberal motion threshold)
    reg, mask = load_confounds(
        regular_nii, strategy=("motion", "scrub"), scrub=0, fd_threshold=4
    )
    assert mask is None


@pytest.mark.parametrize(
    "image_type",
    [
        "regular",
        "native",
        "ica_aroma",
        "gifti",
        "cifti",
        "res",
        "den",
        "part",
    ],
)
def test_inputs(tmp_path, image_type):
    """Test multiple images as input."""
    # generate files
    files = []
    for i in range(2):  # gifti edge case
        nii, _ = create_tmp_filepath(
            tmp_path,
            bids_fields={
                "entities": {
                    "sub": f"test{i + 1}",
                    "ses": "test",
                    "task": "testimg",
                    "run": "01",
                }
            },
            image_type=image_type,
            copy_confounds=True,
            copy_json=True,
        )
        files.append(nii)

    if image_type == "ica_aroma":
        conf, _ = load_confounds(files, strategy=("ica_aroma",))
    else:
        conf, _ = load_confounds(files)
    assert len(conf) == 2


def test_load_confounds_for_gifti(tmp_path):
    """Ensure that confounds are found for gifti files.

    Regression test for
    https://github.com/nilearn/nilearn/issues/3817
    Wrong order of the space and hemi entities in the filename pattern
    led to confounds not being found.
    """
    bids_path = create_fake_bids_dataset(base_dir=tmp_path, n_sub=1, n_ses=1)
    selection = get_bids_files(
        bids_path / "derivatives",
        sub_label="01",
        file_tag="bold",
        file_type="func.gii",
        filters=[
            ("ses", "01"),
            ("task", "main"),
            ("run", "01"),
            ("hemi", "L"),
        ],
        sub_folder=True,
    )
    assert len(selection) == 1
    load_confounds(
        selection[0],
        strategy=["motion", "wm_csf"],
        motion="basic",
        demean=False,
    )