Coverage for nilearn/plotting/find_cuts.py: 0% (188 statements)
1"""Tools to find activations and cut on maps."""
3import numbers
4import warnings
6import numpy as np
7from scipy.ndimage import center_of_mass, find_objects, label
9from nilearn._utils import as_ndarray, check_niimg_3d, check_niimg_4d
10from nilearn._utils.extmath import fast_abs_percentile
11from nilearn._utils.logger import find_stack_level
12from nilearn._utils.ndimage import largest_connected_component
13from nilearn._utils.niimg import safe_get_data
15# Local imports
16from nilearn.image import get_data, iter_img, reorder_img
17from nilearn.image.image import smooth_array
18from nilearn.image.resampling import coord_transform
19from nilearn.plotting._utils import check_threshold_not_negative
21###############################################################################
22# Functions for automatic choice of cuts coordinates
23###############################################################################
25DEFAULT_CUT_COORDS = (0.0, 0.0, 0.0)


def find_xyz_cut_coords(img, mask_img=None, activation_threshold=None):
    """Find the center of the largest activation connected component.

    Parameters
    ----------
    img : 3D Nifti1Image
        The brain map.

    mask_img : 3D Nifti1Image or None, default=None
        An optional brain mask. If provided, mask_img should not be empty.

    activation_threshold : :obj:`float` or None, default=None
        The lower threshold to the positive activation. If None, the
        activation threshold is computed using the 80th percentile of
        the absolute value of the map.

    Returns
    -------
    x : :obj:`float`
        The x world coordinate.

    y : :obj:`float`
        The y world coordinate.

    z : :obj:`float`
        The z world coordinate.

    Raises
    ------
    ValueError
        If the specified threshold is a negative number.
    """
    check_threshold_not_negative(activation_threshold)
    # if a pseudo-4D image or several images were passed (cf. #922),
    # we reduce to a single 3D image to find the coordinates
    img = check_niimg_3d(img)
    data = safe_get_data(img)

    # when given image is empty, return (0., 0., 0.)
    if np.all(data == 0.0):
        warnings.warn(
            "Given img is empty. "
            f"Returning default cut_coords={DEFAULT_CUT_COORDS} instead.",
            stacklevel=find_stack_level(),
        )
        x_map, y_map, z_map = DEFAULT_CUT_COORDS
        return np.asarray(
            coord_transform(x_map, y_map, z_map, img.affine)
        ).tolist()

    # Retrieve optional mask
    if mask_img is not None:
        mask_img = check_niimg_3d(mask_img)
        mask = safe_get_data(mask_img)
        if not np.allclose(mask_img.affine, img.affine):
            raise ValueError(
                f"Mask affine:\n{mask_img.affine}\n "
                f"is different from img affine:\n{img.affine}"
            )
    else:
        mask = None

    # To speed up computations, we work with partial views of the array,
    # and keep track of the offset
    offset = np.zeros(3)

    # Deal with masked arrays:
    if hasattr(data, "mask"):
        not_mask = np.logical_not(data.mask)
        if mask is None:
            mask = not_mask
        else:
            mask *= not_mask
        data = np.asarray(data)

    # Get rid of potential memmapping
    data = as_ndarray(data)
    my_map = data.copy()
    if mask is not None:
        # check against empty mask
        if mask.sum() == 0.0:
            warnings.warn(
                "Could not determine cut coords: "
                "Provided mask is empty. "
                "Returning center of mass instead.",
                stacklevel=find_stack_level(),
            )
            cut_coords = center_of_mass(np.abs(my_map)) + offset
            x_map, y_map, z_map = cut_coords
            return np.asarray(
                coord_transform(x_map, y_map, z_map, img.affine)
            ).tolist()
        slice_x, slice_y, slice_z = find_objects(mask.astype(int))[0]
        my_map = my_map[slice_x, slice_y, slice_z]
        mask = mask[slice_x, slice_y, slice_z]
        my_map *= mask
        offset += [slice_x.start, slice_y.start, slice_z.start]
    # Testing min and max is faster than np.all(my_map == 0)
    if my_map.max() == my_map.min() == 0:
        warnings.warn(
            "Could not determine cut coords: "
            "All values were masked. "
            "Returning center of mass of unmasked data instead.",
            stacklevel=find_stack_level(),
        )
        # Call center of mass on initial data since my_map is zero.
        # Therefore, do not add offset to cut_coords.
        cut_coords = center_of_mass(np.abs(data))
        x_map, y_map, z_map = cut_coords
        return np.asarray(
            coord_transform(x_map, y_map, z_map, img.affine)
        ).tolist()
    if activation_threshold is None:
        activation_threshold = fast_abs_percentile(
            my_map[my_map != 0].ravel(), 80
        )
    try:
        eps = 2 * np.finfo(activation_threshold).eps
    except ValueError:
        # The above will fail for exact types, eg integers
        eps = 1e-15

    mask = np.abs(my_map) > (activation_threshold - eps)
    # mask may be zero everywhere in rare cases
    if mask.max() == 0:
        warnings.warn(
            "Could not determine cut coords: "
            "All voxels were masked by the thresholding. "
            "Returning the center of mass instead.",
            stacklevel=find_stack_level(),
        )
        cut_coords = center_of_mass(np.abs(my_map)) + offset
        x_map, y_map, z_map = cut_coords
        return np.asarray(
            coord_transform(x_map, y_map, z_map, img.affine)
        ).tolist()

    mask = largest_connected_component(mask)
    slice_x, slice_y, slice_z = find_objects(mask.astype(int))[0]
    my_map = my_map[slice_x, slice_y, slice_z]
    mask = mask[slice_x, slice_y, slice_z]
    my_map *= mask
    offset += [slice_x.start, slice_y.start, slice_z.start]

    # For the second threshold, we use a mean, as it is much faster,
    # although it is less robust
    second_threshold = np.abs(np.mean(my_map[mask]))
    second_mask = np.abs(my_map) > second_threshold
    if second_mask.sum() > 50:
        my_map *= largest_connected_component(second_mask)
    cut_coords = center_of_mass(np.abs(my_map))
    x_map, y_map, z_map = cut_coords + offset

    # Return as a list of scalars
    return np.asarray(
        coord_transform(x_map, y_map, z_map, img.affine)
    ).tolist()
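
# A minimal usage sketch (illustrative only): `stat_img` stands for any 3D
# statistical map, e.g. a contrast image loaded with nibabel, and the
# threshold value is an arbitrary choice for the example.
#
#     from nilearn import plotting
#
#     x, y, z = plotting.find_xyz_cut_coords(
#         stat_img, activation_threshold=3.0
#     )
#     plotting.plot_stat_map(stat_img, cut_coords=(x, y, z), threshold=3.0)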


def _transform_cut_coords(cut_coords, direction, affine):
    """Transform cut_coords back in image space.

    Parameters
    ----------
    cut_coords : 1D array of length n_cuts
        The coordinates to be transformed.

    direction : :obj:`str`
        Sectional direction; possible values are "x", "y", or "z".

    affine : 2D array of shape (4, 4)
        The affine for the image.

    Returns
    -------
    cut_coords : 1D array of length n_cuts
        The original cut_coords transformed to image space.

    """
    # make kwargs
    axis = "xyz".index(direction)
    kwargs = {name: np.zeros(len(cut_coords)) for name in "xyz"}
    kwargs[direction] = cut_coords
    kwargs["affine"] = affine

    # We need atleast_1d to make sure that when n_cuts is 1 we do
    # get an iterable
    cut_coords = coord_transform(**kwargs)[axis]
    return np.atleast_1d(cut_coords)
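
# A minimal sketch of what this helper does (illustrative only): with a
# hypothetical diagonal affine that scales voxels by 2 mm, voxel indices
# along "z" are mapped to world-space millimeters.
#
#     import numpy as np
#
#     affine = np.diag([2.0, 2.0, 2.0, 1.0])
#     _transform_cut_coords(np.array([10, 20, 30]), "z", affine)
#     # array([20., 40., 60.])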


def find_cut_slices(img, direction="z", n_cuts=7, spacing="auto"):
    """Find 'good' cross-section slicing positions along a given axis.

    Parameters
    ----------
    img : 3D Niimg-like object
        See :ref:`extracting_data`.
        The brain map.

    direction : :obj:`str`, default='z'
        Sectional direction; possible values are "x", "y", or "z".

    n_cuts : :obj:`int`, default=7
        Number of cuts in the plot.

    spacing : 'auto' or :obj:`int`, default='auto'
        Minimum spacing between cuts (in voxels, not millimeters).
        If 'auto', the spacing is .5 / n_cuts * img_length.

    Returns
    -------
    cut_coords : 1D array of length n_cuts
        The computed cut_coords.

    Notes
    -----
    This code works by iteratively locating peak activations that are
    separated by a distance of at least 'spacing'. If n_cuts is very
    large and all the activated regions are covered, cuts with a spacing
    less than 'spacing' will be returned.

    Warnings
    --------
    If an img with a non-diagonal affine is given, this function
    automatically reorders the image to get a diagonal affine back.
    This is to avoid finding the same cuts in the slices.

    """
    # misc
    if direction not in "xyz":
        raise ValueError(
            f"'direction' must be one of 'x', 'y', or 'z'. Got '{direction}'"
        )
    axis = "xyz".index(direction)
    img = check_niimg_3d(img)
    affine = img.affine
    if not np.all(np.diag(affine)[:3]):
        warnings.warn(
            "A non-diagonal affine is found in the given "
            "image. Reordering the image to get diagonal affine "
            "for finding cuts in the slices.",
            stacklevel=find_stack_level(),
        )
        # resample is set to avoid issues with an image having a non-diagonal
        # affine and rotation.
        img = reorder_img(img, resample="nearest", copy_header=True)
        affine = img.affine
    # note: orig_data is a copy of img._data_cache thanks to np.abs
    orig_data = np.abs(safe_get_data(img))
    this_shape = orig_data.shape[axis]

    if not isinstance(n_cuts, numbers.Number):
        raise ValueError(
            "The number of cuts (n_cuts) must be an integer "
            "greater than or equal to 1. "
            f"You provided a value of n_cuts={n_cuts}."
        )

    # BF issue #575: Return all the slices along an axis if this axis
    # is the display mode and there are at least as many requested
    # n_slices as there are slices.
    if n_cuts > this_shape:
        warnings.warn(
            "Too many cuts requested for the data: "
            f"n_cuts={n_cuts}, data size={this_shape}.",
            stacklevel=find_stack_level(),
        )
        return _transform_cut_coords(np.arange(this_shape), direction, affine)

    # To smooth data that might be np.int or np.uint,
    # first convert it to float.
    data = orig_data.copy()
    if data.dtype.kind in ("i", "u"):
        data = data.astype(np.float64)

    data = smooth_array(data, affine, fwhm="fast")

    # to control floating point error problems
    # during given input value "n_cuts"
    epsilon = np.finfo(np.float32).eps
    difference = abs(round(n_cuts) - n_cuts)
    if round(n_cuts) < 1.0 or difference > epsilon:
        message = (
            f"Image has {this_shape} slices in direction {direction}. "
            "Therefore, the number of cuts "
            f"must be between 1 and {this_shape}. "
            f"You provided n_cuts={n_cuts}."
        )
        raise ValueError(message)
    else:
        n_cuts = round(n_cuts)

    if spacing == "auto":
        spacing = max(int(0.5 / n_cuts * data.shape[axis]), 1)

    slices = [slice(None, None), slice(None, None), slice(None, None)]

    cut_coords = []

    for _ in range(n_cuts):
        # Find a peak
        max_along_axis = np.unravel_index(np.abs(data).argmax(), data.shape)[
            axis
        ]

        # cancel out the surroundings of the peak
        start = max(0, max_along_axis - spacing)
        stop = max_along_axis + spacing
        slices[axis] = slice(start, stop)
        # We don't actually fully zero the neighborhood, to avoid ending
        # up with fully zeros if n_cuts is too big: we can do multiple
        # passes on the data
        data[tuple(slices)] *= 1.0e-3

        cut_coords.append(max_along_axis)

    # We sometimes get duplicated cuts, so we add cuts at the beginning
    # and the end
    cut_coords = np.unique(cut_coords).tolist()
    while len(cut_coords) < n_cuts:
        # Candidates for new cuts:
        slice_below = min(cut_coords) - 2
        slice_above = max(cut_coords) + 2
        candidates = [slice_above]
        # One slice where there is the biggest gap in the existing
        # cut_coords
        if len(cut_coords) > 1:
            middle_idx = np.argmax(np.diff(cut_coords))
            slice_middle = int(
                0.5 * (cut_coords[middle_idx] + cut_coords[middle_idx + 1])
            )
            if slice_middle not in cut_coords:
                candidates.append(slice_middle)
        if slice_below >= 0:
            # We need positive slice to avoid having negative
            # indices, which would work, but not the way we think of them
            candidates.append(slice_below)
        best_weight = -10
        for candidate in candidates:
            if candidate >= this_shape:
                this_weight = 0
            else:
                this_weight = np.sum(np.rollaxis(orig_data, axis)[candidate])
            if this_weight > best_weight:
                best_candidate = candidate
                best_weight = this_weight

        cut_coords.append(best_candidate)
        cut_coords = np.unique(cut_coords).tolist()

    cut_coords = np.array(cut_coords)
    cut_coords.sort()

    return _transform_cut_coords(cut_coords, direction, affine)
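
# A minimal usage sketch (illustrative only): here the MNI template from
# nilearn.datasets serves as an example image; any 3D Niimg-like object
# works the same way.
#
#     from nilearn import datasets, plotting
#
#     template = datasets.load_mni152_template()
#     cuts = plotting.find_cut_slices(template, direction="z", n_cuts=5)
#     plotting.plot_anat(template, display_mode="z", cut_coords=cuts)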


def find_parcellation_cut_coords(
    labels_img,
    background_label=0,
    return_label_names=False,
    label_hemisphere="left",
):
    """Return coordinates of center of mass of 3D :term:`parcellation` atlas.

    Parameters
    ----------
    labels_img : 3D Nifti1Image
        A brain :term:`parcellation` atlas with specific mask labels for each
        parcellated region.

    background_label : :obj:`int`, default=0
        Label value used in labels_img to represent background.

    return_label_names : :obj:`bool`, default=False
        Returns list of labels.

    label_hemisphere : 'left' or 'right', default='left'
        Choice of hemisphere to compute label center coords for.
        Applies only in cases where atlas labels are lateralized.
        Eg. Yeo or Harvard Oxford atlas.

    Returns
    -------
    coords : numpy.ndarray of shape (n_labels, 3)
        Label regions cut coordinates in image space (mm).

    labels_list : :obj:`list`, optional
        Label region. Returned only when return_label_names is True.

    See Also
    --------
    nilearn.plotting.find_probabilistic_atlas_cut_coords : For coordinates
        extraction on probabilistic atlases (4D) (Eg. MSDL atlas)

    """
    # check label_hemisphere input
    if label_hemisphere not in ["left", "right"]:
        raise ValueError(
            f"Invalid label_hemisphere name:{label_hemisphere}.\n"
            "Should be one of these 'left' or 'right'."
        )
    # Grab data and affine
    labels_img = reorder_img(check_niimg_3d(labels_img), copy_header=True)
    labels_data = get_data(labels_img)
    labels_affine = labels_img.affine

    # Grab number of unique values in 3d image
    unique_labels = np.unique(labels_data)
    unique_labels = np.delete(unique_labels, background_label)

    # Loop over parcellation labels, grab center of mass and dump into coords
    # list
    coord_list = []
    label_list = []

    for cur_label in unique_labels:
        cur_img = labels_data == cur_label

        # Grab hemispheres separately
        x, y, z = coord_transform(0, 0, 0, np.linalg.inv(labels_affine))
        left_hemi = get_data(labels_img).copy() == cur_label
        right_hemi = get_data(labels_img).copy() == cur_label
        left_hemi[int(x) :] = 0
        right_hemi[: int(x)] = 0

        # Two connected components in both hemispheres
        left_hemi_has_values = np.any(left_hemi)
        right_hemi_all_zero = not np.any(right_hemi)
        if left_hemi_has_values or right_hemi_all_zero:
            if label_hemisphere == "left":
                cur_img = left_hemi.astype(int)
            elif label_hemisphere == "right":
                cur_img = right_hemi.astype(int)

        # Take the largest connected component
        labels, _ = label(cur_img)
        label_count = np.bincount(labels.ravel().astype(int))
        label_count[0] = 0
        component = labels == label_count.argmax()

        # Get parcellation center of mass
        x, y, z = center_of_mass(component)

        # Dump label region and coordinates into a dictionary
        label_list.append(cur_label)
        coord_list.append((x, y, z))

        # Transform coordinates
        coords = [
            coord_transform(i[0], i[1], i[2], labels_affine)
            for i in coord_list
        ]

    if return_label_names:
        return np.array(coords), label_list
    else:
        return np.array(coords)
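
# A minimal usage sketch (illustrative only), using the Harvard-Oxford
# cortical atlas as an example of a labeled (deterministic) parcellation:
#
#     from nilearn import datasets, plotting
#
#     atlas = datasets.fetch_atlas_harvard_oxford("cort-maxprob-thr25-2mm")
#     coords, labels = plotting.find_parcellation_cut_coords(
#         atlas.maps, return_label_names=True
#     )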


def find_probabilistic_atlas_cut_coords(maps_img):
    """Return coordinates of the centers of the maps of a \
    :term:`probabilistic atlas<Probabilistic atlas>` 4D image.

    Parameters
    ----------
    maps_img : 4D Nifti1Image
        A probabilistic brain atlas with probabilistic masks in the fourth
        dimension.

    Returns
    -------
    coords : :class:`numpy.ndarray` of shape (n_maps, 3)
        Label regions cut coordinates in image space (mm).

    See Also
    --------
    nilearn.plotting.find_parcellation_cut_coords : For coordinates
        extraction on parcellations denoted with labels (3D)
        (Eg. Harvard Oxford atlas)

    """
    maps_img = check_niimg_4d(maps_img)
    maps_imgs = iter_img(maps_img)
    coords = [find_xyz_cut_coords(img) for img in maps_imgs]
    return np.array(coords)
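
# A minimal usage sketch (illustrative only), using the MSDL probabilistic
# atlas mentioned above; the resulting coordinates are commonly used to
# position nodes, e.g. with plot_connectome.
#
#     from nilearn import datasets, plotting
#
#     msdl = datasets.fetch_atlas_msdl()
#     coords = plotting.find_probabilistic_atlas_cut_coords(msdl.maps)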