Coverage for nilearn/decoding/tests/test_same_api.py: 21%

120 statements  


1"""Make sure all models are using the same low-level API. 

2 

3for computing image gradient, loss functions, etc. 

4""" 

5 

6import numpy as np 

7import pytest 

8from nibabel import Nifti1Image 

9from numpy.testing import ( 

10 assert_almost_equal, 

11 assert_array_almost_equal, 

12 assert_array_equal, 

13) 

14from sklearn.datasets import load_iris 

15 

16from nilearn.decoding._objective_functions import ( 

17 logistic_loss_lipschitz_constant, 

18 spectral_norm_squared, 

19 squared_loss, 

20 squared_loss_grad, 

21) 

22from nilearn.decoding.space_net import ( 

23 BaseSpaceNet, 

24 SpaceNetClassifier, 

25 SpaceNetRegressor, 

26) 

27from nilearn.decoding.space_net_solvers import ( 

28 _logistic_derivative_lipschitz_constant, 

29 _squared_loss_and_spatial_grad, 

30 _squared_loss_and_spatial_grad_derivative, 

31 _squared_loss_derivative_lipschitz_constant, 

32 graph_net_logistic, 

33 graph_net_squared_loss, 

34 tvl1_solver, 

35) 

36from nilearn.image import get_data 

37from nilearn.masking import unmask_from_to_3d_array 

38 

39 
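# Note: the ``rng`` and ``affine_eye`` arguments of the tests below are pytest
# fixtures; they are assumed to be provided by nilearn's test configuration
# (conftest.py) rather than defined in this module.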

def _make_data(rng=None, masked=False, dim=(2, 2, 2)):
    """Generate a small synthetic regression dataset (X, y, w, mask)."""
    if rng is None:
        rng = np.random.default_rng(42)
    mask = np.ones(dim).astype(bool)
    mask[rng.random(dim) < 0.7] = 0
    w = np.zeros(dim)
    w[dim[0] // 2 :, dim[1] // 2 :, : dim[2] // 2] = 1
    n = 5
    X = np.ones([n, *dim])
    X += rng.standard_normal(X.shape)
    y = np.dot([x[mask] for x in X], w[mask])
    if masked:
        X = np.array([x[mask] for x in X])
        w = w[mask]
    else:
        X = np.rollaxis(X, 0, start=4)
        assert X.shape[-1] == n
    return X, y, w, mask
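# A usage sketch (illustration only, not executed by the test suite): with
# masked=True the helper returns a 2D design matrix restricted to the voxels
# kept by the random mask, e.g.
#
#     X, y, w, mask = _make_data(masked=True)
#     # X.shape == (5, mask.sum()); y.shape == (5,); w.shape == (mask.sum(),)
#
# whereas with masked=False, X is returned as a 4D array with samples last.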

def to_niimgs(X, dim):
    """Embed a 2D design matrix into a 4D Niimg and its mask image."""
    p = np.prod(dim)

    assert len(dim) == 3
    assert X.shape[-1] <= p

    mask = np.zeros(p).astype(bool)
    mask[: X.shape[-1]] = 1

    assert mask.sum() == X.shape[1]

    mask = mask.reshape(dim)
    X = np.rollaxis(
        np.array([unmask_from_to_3d_array(x, mask) for x in X]), 0, start=4
    )
    affine = np.eye(4)

    return Nifti1Image(X, affine), Nifti1Image(mask.astype(np.float64), affine)
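# A usage sketch (illustration only): the Iris-based tests below call this as
#
#     X_img, mask_img = to_niimgs(iris.data, (2, 2, 2))
#
# which maps the 4 Iris features onto 4 voxels of a 2x2x2 volume and returns
# the corresponding 4D data image and mask image.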

def test_same_energy_calculus_pure_lasso(rng):
    """Check that squared_loss and its spatial-grad variant agree \
    when the spatial weight is 0.
    """
    X, y, w, mask = _make_data(rng=rng, masked=True)

    # check funcvals
    f1 = squared_loss(X, y, w)
    f2 = _squared_loss_and_spatial_grad(X, y, w.ravel(), mask, 0.0)

    assert f1 == f2

    # check derivatives
    g1 = squared_loss_grad(X, y, w)
    g2 = _squared_loss_and_spatial_grad_derivative(X, y, w.ravel(), mask, 0.0)

    assert_array_equal(g1, g2)
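# Note on the zero weights used here and in the two Lipschitz tests below:
# the last argument of _squared_loss_and_spatial_grad and the grad_weight
# values (e.g. ``alpha * X.shape[0] * 0.0``) are deliberately zero, so the
# spatial regularization term drops out and the composite objective should
# reduce to the plain squared (or logistic) loss.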

def test_lipschitz_constant_loss_mse(rng):
    X, _, _, mask = _make_data(rng=rng, masked=True)
    alpha = 0.1
    mask = np.ones(X.shape[1]).astype(bool)
    grad_weight = alpha * X.shape[0] * 0.0

    a = _squared_loss_derivative_lipschitz_constant(X, mask, grad_weight)
    b = spectral_norm_squared(X)

    assert_almost_equal(a, b)

def test_lipschitz_constant_loss_logreg(rng):
    X, _, _, mask = _make_data(rng=rng, masked=True)
    grad_weight = 0.1 * X.shape[0] * 0.0

    a = _logistic_derivative_lipschitz_constant(X, mask, grad_weight)
    b = logistic_loss_lipschitz_constant(X)

    assert a == b

def test_graph_net_and_tvl1_same_for_pure_l1(max_iter=100, decimal=2):
    """Check that graph_net_squared_loss and tvl1_solver give the same \
    results when l1_ratio = 1.

    Results should be exactly the same for pure lasso.
    However, because of the TV-L1 prox approximation, results might be
    'slightly' different.
    """
    X, y, _, mask = _make_data(dim=(3, 3, 3))
    y = np.round(y)
    alpha = 0.01
    unmasked_X = np.rollaxis(X, -1, start=0)
    unmasked_X = np.array([x[mask] for x in unmasked_X])

    a = tvl1_solver(
        unmasked_X,
        y,
        alpha,
        l1_ratio=1.0,
        mask=mask,
        loss="mse",
        max_iter=max_iter,
        verbose=1,
    )[0]
    b = graph_net_squared_loss(
        unmasked_X,
        y,
        alpha,
        l1_ratio=1.0,
        max_iter=max_iter,
        mask=mask,
        verbose=0,
    )[0]

    assert_array_almost_equal(a, b, decimal=decimal)
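# Why these equivalence checks can use a tight tolerance: with l1_ratio=1.0
# the spatial part of both penalties (the graph-net quadratic term and the TV
# term) gets zero weight, so graph-net and TV-L1 are presumably solving the
# same pure-L1 (lasso) problem; the remaining discrepancy comes from the
# approximate TV-L1 proximal operator, as noted in the docstrings.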

@pytest.mark.parametrize("standardize", [True, False])
def test_graph_net_and_tvl1_same_for_pure_l1_base_space_net(
    affine_eye,
    standardize,
    max_iter=100,
    decimal=2,
):
    """Check that BaseSpaceNet with graph-net and tv-l1 penalties gives \
    the same results when l1_ratio = 1.

    Results should be exactly the same for pure lasso.
    However, because of the TV-L1 prox approximation, results might be
    'slightly' different.
    """
    X, y, _, mask = _make_data(dim=(3, 3, 3))
    y = np.round(y)
    alpha = 0.01
    unmasked_X = np.rollaxis(X, -1, start=0)
    unmasked_X = np.array([x[mask] for x in unmasked_X])

    mask = Nifti1Image(mask.astype(np.float64), affine_eye)
    X = Nifti1Image(X.astype(np.float64), affine_eye)

    sl = BaseSpaceNet(
        alphas=alpha,
        l1_ratios=1.0,
        mask=mask,
        penalty="graph-net",
        max_iter=max_iter,
        standardize=standardize,
        verbose=0,
    ).fit(X, y)
    tvl1 = BaseSpaceNet(
        alphas=alpha,
        l1_ratios=1.0,
        mask=mask,
        penalty="tv-l1",
        max_iter=max_iter,
        standardize=standardize,
        verbose=0,
    ).fit(X, y)

    assert_array_almost_equal(sl.coef_, tvl1.coef_, decimal=decimal)

def test_graph_net_and_tvl1_same_for_pure_l1_logistic(max_iter=20, decimal=2):
    """Check that graph_net_logistic and tvl1_solver give the same results \
    when l1_ratio = 1.
    """
    iris = load_iris()
    X, y = iris.data, iris.target
    y = y > 0.0
    alpha = 1.0 / X.shape[0]
    _, mask_ = to_niimgs(X, (2, 2, 2))
    mask = get_data(mask_).astype(bool).ravel()

    a = graph_net_logistic(
        X, y, alpha, l1_ratio=1.0, mask=mask, max_iter=max_iter, verbose=0
    )[0]
    b = tvl1_solver(
        X,
        y,
        alpha,
        l1_ratio=1.0,
        loss="logistic",
        mask=mask,
        max_iter=max_iter,
        verbose=1,
    )[0]

    assert_array_almost_equal(a, b, decimal=decimal)

@pytest.mark.parametrize("standardize", [True, False])
def test_graph_net_and_tvl1_same_for_pure_l1_logistic_spacenet_classifier(
    standardize, max_iter=20, decimal=2
):
    """Check that SpaceNetClassifier with graph-net and tv-l1 penalties \
    gives the same results when l1_ratio = 1.
    """
    iris = load_iris()
    X, y = iris.data, iris.target
    y = y > 0.0
    alpha = 1.0 / X.shape[0]
    X_, mask_ = to_niimgs(X, (2, 2, 2))

    sl = SpaceNetClassifier(
        alphas=alpha,
        l1_ratios=1.0,
        max_iter=max_iter,
        mask=mask_,
        penalty="graph-net",
        standardize=standardize,
        verbose=0,
    ).fit(X_, y)
    tvl1 = SpaceNetClassifier(
        alphas=alpha,
        l1_ratios=1.0,
        max_iter=max_iter,
        mask=mask_,
        penalty="tv-l1",
        standardize=standardize,
        verbose=0,
    ).fit(X_, y)

    assert_array_almost_equal(sl.coef_[0], tvl1.coef_[0], decimal=decimal)

@pytest.mark.parametrize("standardize", [True, False])
def test_graph_net_and_tv_same_for_pure_l1_another_test(
    standardize, decimal=1
):
    """Check that BaseSpaceNet with graph-net and tv-l1 penalties gives \
    the same results when l1_ratio = 1.
    """
    dim = (3, 3, 3)
    X, y, _, mask = _make_data(masked=True, dim=dim)
    X, mask = to_niimgs(X, dim)
    alpha = 0.1
    l1_ratio = 1.0
    max_iter = 20

    sl = BaseSpaceNet(
        alphas=alpha,
        l1_ratios=l1_ratio,
        penalty="graph-net",
        max_iter=max_iter,
        mask=mask,
        is_classif=False,
        standardize=standardize,
        verbose=0,
    ).fit(X, y)
    tvl1 = BaseSpaceNet(
        alphas=alpha,
        l1_ratios=l1_ratio,
        penalty="tv-l1",
        max_iter=max_iter,
        mask=mask,
        is_classif=False,
        standardize=standardize,
        verbose=0,
    ).fit(X, y)

    assert_array_almost_equal(sl.coef_, tvl1.coef_, decimal=decimal)

@pytest.mark.parametrize("penalty", ["graph-net", "tv-l1"])
@pytest.mark.parametrize("cls", [SpaceNetRegressor, SpaceNetClassifier])
def test_coef_shape(penalty, cls):
    iris = load_iris()
    X, y = iris.data, iris.target
    X, mask = to_niimgs(X, (2, 2, 2))

    model = cls(
        mask=mask, max_iter=3, penalty=penalty, alphas=1.0, verbose=0
    ).fit(X, y)

    assert model.coef_.ndim == 2