|
35 | 35 | """
|
36 | 36 |
|
37 | 37 | # %%
|
| 38 | +from nilearn import plotting |
38 | 39 | import matplotlib.pyplot as plt
|
39 | 40 | import numpy as np
|
40 | 41 | import siibra
|
41 | 42 |
|
42 | 43 | assert siibra.__version__ >= "1.0.1"
|
43 | 44 |
|
44 | 45 | # %%
|
45 |
| -# 1: Retrieve probability map of a motor area in Julich-Brain |
| 46 | +# First we retrieve the probability map of a motor area |
| 47 | +# from the Julich-Brain cytoarchitectonic atlas. |
46 | 48 | region = siibra.get_region("julich 3.0.3", "4p right")
|
47 |
| -region_map = siibra.get_map("julich 3.0.3", "mni152", "statistical").get_volume(region) |
| 49 | +region_map = region.get_regional_map("mni152", "statistical") |
48 | 50 |
|
49 | 51 | # %%
|
50 |
| -# 2: Extract BigBrain 1 micron patches with high probability in this area |
51 |
| -patches = siibra.features.get(region_map, "BigBrain1MicronPatch", lower_threshold=0.5) |
52 |
| -print(f"Found {len(patches)} patches.") |
| 52 | +# We can use the probability map as a query to extract 1 micron resolution |
| 53 | +# cortical image patches from BigBrain. |
| 54 | +# The patches are automatically selected to be centered on the cortex |
| 55 | +# and cover its whole thickness. |
| 56 | +patches = siibra.features.get(region_map, "BigBrain1MicronPatch", lower_threshold=0.52) |
| 57 | + |
| 58 | +# For this example, we filter the results to a particular (nice) brain section, |
| 59 | +# but the tutorial can be run without this filter. |
| 60 | +patches = [p for p in patches if p.bigbrain_section == 3556] |
| 61 | + |
| 62 | +# Results are sorted by relevance to the query, so in our case the first |
| 63 | +# in the list will be the one with the highest probability as defined in the
| 64 | +# probability map. |
| 65 | +patch = patches[0] |
| 66 | + |
| 67 | +# siibra samples the patch in upright position, but keeps its |
| 68 | +# original orientation in the affine transformation encoded in the NIfTI. |
| 69 | +# Let's first plot the pure voxel data of the patch to see that. |
| 70 | +plt.imshow( |
| 71 | + patch.fetch().get_fdata().squeeze(), cmap='gray' |
| 72 | +) |
53 | 73 |
|
54 | 74 | # %%
|
55 |
| -# 3: Display highly rated samples, here further reduced to a predefined section |
56 |
| -section_num = 3556 |
57 |
| -candidates = [p for p in patches if p.bigbrain_section == section_num] |
58 |
| -patch = candidates[0] |
59 |
| -plt.figure() |
60 |
| -plt.imshow(patch.fetch().get_fdata().squeeze(), cmap="gray", vmin=0, vmax=2**16) |
61 |
| -plt.axis("off") |
62 |
| -plt.title(f"#{section_num} - {patch.vertex}", fontsize=10) |
| 75 | +# To understand where and how siibra actually sampled this patch, |
| 76 | +# we first plot the position of the chosen brain section in MNI space. |
| 77 | +view = plotting.plot_glass_brain(region_map.fetch(), cmap='viridis') |
| 78 | +roi_mni = patch.get_boundingbox().warp('mni152') |
| 79 | +_, key, pos = min(zip(roi_mni.shape, view.axes.keys(), roi_mni.center)) |
| 80 | +view.axes[key].ax.axvline(pos, color='red', linestyle='--', linewidth=2) |
63 | 81 |
|
64 | 82 | # %%
|
65 |
| -# To understand how the live query works, we have a look at some of the intermediate |
66 |
| -# steps that `siibra` is performing under the hood. It first identifies brain sections that |
67 |
| -# intersect the given map (or, more generally, the given image volume). |
68 |
| -# Each returned patch still has the corresponding section linked, so we can have a look at it. |
69 |
| -# The section was intersected with the cortical layer 4 surface to get an approximation of |
70 |
| -# the mid cortex. This can be done by fetching the layer surface meshes, and intersecting |
71 |
| -# them with the 3D plane corresponding to the brain section. |
| 83 | +# Next we plot the section itself and identify the larger region of |
| 84 | +# interest around the patch, using the bounding box |
| 85 | +# of the centers of the most relevant patches with a bit of padding.
| 86 | +patch_locations = siibra.PointCloud.union(*[p.get_boundingbox().center for p in patches]) |
| 87 | +roi = patch_locations.boundingbox.zoom(1.5) |
| 88 | + |
| 89 | +# fetch the section at reduced resolution for plotting. |
| 90 | +section_img = patch.section.fetch(resolution_mm=0.2) |
| 91 | +display = plotting.plot_img( |
| 92 | + section_img, display_mode="y", cmap='gray', annotate=False, |
| 93 | +) |
| 94 | +display.title(f"BigBrain section #{patch.bigbrain_section}", size=8) |
| 95 | + |
| 96 | +# Annotate the region of interest bounding box |
| 97 | +ax = list(display.axes.values())[0].ax |
| 98 | +(x, w), _, (z, d) = zip(roi.minpoint, roi.shape) |
| 99 | +ax.add_patch(plt.Rectangle((x, z), w, d, ec='k', fc='none', lw=1)) |
| 100 | + |
| 101 | + |
| 102 | +# %% |
| 103 | +# Zoom in to the region of interest, and show the cortical layer boundaries |
| 104 | +# that were used to define the patch dimensions. |
| 105 | + |
| 106 | +# Since the patch locations only formed a flat bounding box, |
| 107 | +# we first extend the roi to the patch's shape along the flat dimension. |
| 108 | +for dim, size in enumerate(roi.shape): |
| 109 | + if size == 0: |
| 110 | + roi.minpoint[dim] = patch.get_boundingbox().minpoint[dim] |
| 111 | + roi.maxpoint[dim] = patch.get_boundingbox().maxpoint[dim] |
| 112 | + |
| 113 | +# Fetch the region of interest from the section, and plot it. |
| 114 | +roi_img = patch.section.fetch(voi=roi, resolution_mm=-1) |
| 115 | +display = plotting.plot_img(roi_img, display_mode="y", cmap='gray', annotate=False) |
| 116 | +ax = list(display.axes.values())[0].ax |
| 117 | + |
| 118 | +# Intersect cortical layer surfaces with the image plane |
72 | 119 | plane = siibra.Plane.from_image(patch)
|
73 | 120 | layermap = siibra.get_map("cortical layers", space="bigbrain")
|
74 | 121 | layer_contours = {
|
75 | 122 | layername: plane.intersect_mesh(layermap.fetch(region=layername, format="mesh"))
|
76 | 123 | for layername in layermap.regions
|
77 | 124 | }
|
78 |
| -ymin, ymax = [p[1] for p in patch.section.get_boundingbox()] |
79 |
| -crop_voi = siibra.BoundingBox((17.14, ymin, 40.11), (22.82, ymax, 32.91), 'bigbrain') |
80 |
| -cropped_img = patch.section.fetch(voi=crop_voi, resolution_mm=-1) |
81 |
| -phys2pix = np.linalg.inv(cropped_img.affine) |
82 |
| - |
83 |
| -# The probabilities can be assigned to the contour vertices with the |
84 |
| -# probability map. |
85 |
| -points = siibra.PointCloud.union( |
86 |
| - *[c.intersection(crop_voi) for c in layer_contours["cortical layer 4 right"]] |
87 |
| -) |
88 |
| -# siibra warps points to MNI152 and reads corresponding PMAP values |
89 |
| -probs = region_map.evaluate_points(points) |
90 |
| -img_arr = cropped_img.get_fdata().squeeze().swapaxes(0, 1) |
91 |
| -plt.imshow(img_arr, cmap="gray", origin="lower") |
92 |
| -X, Y, Z = points.transform(phys2pix).coordinates.T |
93 |
| -plt.scatter(X, Z, s=10, c=probs) |
94 |
| -prof_x, _, prof_z = zip(*[p.transform(phys2pix) for p in patch.profile]) |
95 |
| -plt.plot(prof_x, prof_z, "r", lw=2) |
96 |
| -plt.axis("off") |
97 | 125 |
|
98 |
| -# %% |
99 |
| -# We can plot the contours on top of the image, and even use the |
100 |
| -# colormap recommended by siibra. We use a crop around the brain |
101 |
| -# region to zoom a bit closer to the extracte profile and patch. |
102 |
| -plt.figure() |
103 |
| -plt.imshow(img_arr, cmap="gray", vmin=0, vmax=2**16, origin="lower") |
| 126 | +# Plot the contours on top of the image, using the |
| 127 | +# colormap provided by siibra. |
104 | 128 | for layername, contours in layer_contours.items():
|
105 | 129 | layercolor = layermap.get_colormap().colors[layermap.get_index(layername).label]
|
106 | 130 | for contour in contours:
|
107 |
| - for segment in contour.crop(crop_voi): |
108 |
| - pixels = segment.transform(phys2pix, space=None).homogeneous |
109 |
| - plt.plot(pixels[:, 0], pixels[:, 2], "-", ms=4, color=layercolor) |
| 131 | + for segment in contour.crop(roi): |
| 132 | + X, _, Z = segment.coordinates.T |
| 133 | + ax.plot(X, Z, "-", ms=4, color=layercolor) |
110 | 134 |
|
111 |
| -# plot the profile |
112 |
| -plt.plot(prof_x, prof_z, "r", lw=2) |
113 |
| -plt.axis("off") |
| 135 | +# %% |
| 136 | +# Plot the region of interest again, this time with the cortical profile that |
| 137 | +# defined the patch, as well as other candidate patches' locations
| 138 | +# with their relevance scores, i.e., probabilities.
| 139 | +display = plotting.plot_img(roi_img, display_mode="y", cmap='gray', annotate=False) |
| 140 | +ax = list(display.axes.values())[0].ax |
| 141 | + |
| 142 | +# Concatenate all coordinates of the layer 4 intersected contours. |
| 143 | +layer = "cortical layer 4 right" |
| 144 | +XYZ = np.vstack([c.coordinates for c in layer_contours[layer]]) |
| 145 | +layerpoints = siibra.PointCloud(XYZ, space='bigbrain') |
| 146 | +patch_probs = region_map.evaluate_points(layerpoints) |
| 147 | +X, _, Z = layerpoints.coordinates.T |
| 148 | +ax.scatter(X, Z, c=patch_probs, s=10) |
| 149 | + |
| 150 | +# plot the cortical profile in red |
| 151 | +X, _, Z = patch.profile.coordinates.T |
| 152 | +ax.plot(X, Z, "r-", lw=2) |
114 | 153 |
|
115 | 154 | # sphinx_gallery_thumbnail_number = -2
|
0 commit comments