Skip to content

Commit cf35a4c

Browse files
committed
Use endpoints instead of all anchor points when constructing fundi.
Rather than running connect_points() on the anchor points, extracting endpoints from the resulting skeleton with extract_endpoints(), and running connect_points() on those endpoints, implement a much faster approach: 1. threshold the likelihood values in a fold/sulcus; 2. skeletonize(), retaining the anchor points; 3. extract the endpoints of the skeleton that are also anchor points; 4. run connect_points() on the endpoints and skeletonize the result. The preliminary anchor-point skeletons and the endpoint skeletons are the same, but #2 is retained for the following reasons (from yrjo): 1. the likelihood function can have flat areas, so it does not reduce to a single point when thresholding; 2. the likelihood function can have areas of low values where the fundus is 'cut'; 3. the likelihood function is about to change with the learning approach, and the endpoint selection has to be able to perform with different kinds of likelihood functions, even if they don't produce nice skeletons from thresholding.
1 parent b6f740c commit cf35a4c

File tree

3 files changed

+97
-22
lines changed

3 files changed

+97
-22
lines changed

mindboggle/extract/extract_fundi.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -507,17 +507,16 @@ def extract_fundi(fold_IDs, neighbor_lists, depth_file,
507507
indices_anchors = [indices_fold[x] for x in fold_indices_anchors]
508508
n_anchors = len(indices_anchors)
509509
if n_anchors > 1:
510-
511-
# Connect fundus points and extract fundus
512-
print(' Connect {0} fundus points...'.format(n_anchors))
513510
t2 = time()
514511
likelihoods_fold = Z.copy()
515512
likelihoods_fold[indices_fold] = fold_likelihoods
516513

514+
# Connect fundus points and extract fundus.
517515
# If using only endpoints to connect fundus vertices,
518516
# run in two stages -- fast, to get a rough skeleton
519517
# to extract the endpoints, then slow, to get fundi
520518
if not use_only_endpoints:
519+
print(' Connect {0} fundus points...'.format(n_anchors))
521520
B = connect_points(indices_anchors, faces, indices_fold,
522521
likelihoods_fold, neighbor_lists)
523522
indices_skeleton = [i for i,x in enumerate(B) if x > 0]
@@ -532,6 +531,8 @@ def extract_fundi(fold_IDs, neighbor_lists, depth_file,
532531
indices_endpoints = [x for x in indices_endpoints
533532
if x in indices_anchors]
534533
if len(indices_endpoints) > 1:
534+
print(' Connect {0} fundus endpoints...'.
535+
format(len(indices_endpoints)))
535536
B = connect_points(indices_endpoints, faces, indices_fold,
536537
likelihoods_fold, neighbor_lists)
537538
indices_skeleton = [i for i,x in enumerate(B) if x > 0]

mindboggle/pipeline.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -628,7 +628,6 @@
628628
fundi = Node(name='Fundi',
629629
interface = Fn(function = extract_fundi,
630630
input_names = ['fold_IDs',
631-
'n_folds',
632631
'neighbor_lists',
633632
'depth_file',
634633
'mean_curvature_file',
@@ -638,8 +637,7 @@
638637
'use_only_endpoints'],
639638
output_names = ['fundus_IDs',
640639
'n_fundi']))
641-
featureflow.connect([(sulci, fundi, [('sulcus_IDs','fold_IDs'),
642-
('n_sulci','n_folds')]),
640+
featureflow.connect([(sulci, fundi, [('sulcus_IDs','fold_IDs')]),
643641
(neighbors, fundi,
644642
[('neighbor_lists','neighbor_lists')])])
645643
mbflow.connect([(measureflow, featureflow,

mindboggle/utils/mesh_operations.py

Lines changed: 92 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -563,25 +563,47 @@ def skeletonize(binary_array, indices_to_keep, neighbor_lists):
563563
-------
564564
>>> import os
565565
>>> import numpy as np
566-
>>> from mindboggle.utils.mesh_operations import find_neighbors
567-
>>> from mindboggle.utils.mesh_operations import skeletonize
568566
>>> from mindboggle.utils.io_vtk import load_scalar, rewrite_scalars
569-
>>> from mindboggle.info.sulcus_boundaries import sulcus_boundaries
567+
>>> from mindboggle.utils.mesh_operations import find_neighbors
568+
>>> from mindboggle.extract.extract_fundi import compute_likelihood, connect_points
569+
>>> from mindboggle.utils.mesh_operations import find_anchors, skeletonize, extract_endpoints
570570
>>> data_path = os.environ['MINDBOGGLE_DATA']
571-
>>> label_file = os.path.join(data_path, 'subjects', 'MMRR-21-1',
572-
>>> 'label', 'lh.labels.DKT25.manual.vtk')
573-
>>> points, faces, labels, n_vertices = load_scalar(label_file, True)
574-
>>> label_indices = [i for i,x in enumerate(labels) if x == max(labels)]
575-
>>> binary_array = np.zeros(len(points))
576-
>>> binary_array[label_indices] = 1
577-
>>> indices_to_keep = [min(label_indices), max(label_indices)]
578-
>>> binary_array = skeletonize(binary_array, indices_to_keep, neighbor_lists)
571+
>>> depth_file = os.path.join(data_path, 'measures',
572+
>>> '_hemi_lh_subject_MMRR-21-1', 'lh.pial.depth.vtk')
573+
>>> mean_curvature_file = os.path.join(data_path, 'measures',
574+
>>> '_hemi_lh_subject_MMRR-21-1', 'lh.pial.curv.avg.vtk')
575+
>>> min_curvature_vector_file = os.path.join(data_path, 'measures',
576+
>>> '_hemi_lh_subject_MMRR-21-1', 'lh.pial.curv.min.dir.txt')
577+
>>> sulci_file = os.path.join(data_path, 'results', 'features',
578+
>>> '_hemi_lh_subject_MMRR-21-1', 'sulci.vtk')
579+
>>> points, faces, depths, n_vertices = load_scalar(depth_file, True)
580+
>>> neighbor_lists = find_neighbors(faces, len(points))
581+
>>> points, faces, sulcus_IDs, n_vertices = load_scalar(sulci_file, True)
582+
>>> points, faces, mean_curvatures, n_vertices = load_scalar(mean_curvature_file,
583+
>>> True)
584+
>>> min_directions = np.loadtxt(min_curvature_vector_file)
585+
>>> sulcus_ID = 1
586+
>>> indices_fold = [i for i,x in enumerate(sulcus_IDs) if x == sulcus_ID]
587+
>>> fold_likelihoods = compute_likelihood(depths[indices_fold],
588+
>>> mean_curvatures[indices_fold])
589+
>>> thr = 0.5
590+
>>> L = np.zeros(len(points))
591+
>>> L[indices_fold] = fold_likelihoods
592+
>>> fold_indices_anchors = find_anchors(points[indices_fold, :],
593+
>>> fold_likelihoods, min_directions[indices_fold], 5, thr)
594+
>>> indices_anchors = [indices_fold[x] for x in fold_indices_anchors]
595+
>>> L[L > thr] = 1
596+
>>> L[L <= thr] = 0
597+
>>> anchor_skeleton = skeletonize(L, indices_anchors, neighbor_lists)
598+
>>> indices_skeleton = [i for i,x in enumerate(anchor_skeleton) if x > 0]
599+
>>> indices_endpoints = extract_endpoints(indices_skeleton, neighbor_lists)
600+
>>> indices_endpoints = [x for x in indices_endpoints if x in indices_anchors]
601+
>>> skeleton = skeletonize(L, indices_endpoints, neighbor_lists)
602+
>>> indices_endpoint_skel = [x for x in skeleton if x > 0]
579603
>>> # Write results to vtk file and view with mayavi2:
580-
>>> IDs = -1 * np.ones(len(points))
581-
>>> indices_skeleton = [i for i,x in enumerate(binary_array) if x == 1]
582-
>>> IDs[indices_skeleton] = 1
583-
>>> IDs[indices_to_keep] = 2
584-
>>> rewrite_scalars(label_file, 'test_skeletonize.vtk', IDs, IDs)
604+
>>> skeleton[indices_anchors] = 3
605+
>>> skeleton[indices_endpoints] = 5
606+
>>> rewrite_scalars(depth_file, 'test_skeletonize.vtk', skeleton, skeleton)
585607
>>> os.system('mayavi2 -m Surface -d test_skeletonize.vtk &')
586608
587609
"""
@@ -837,3 +859,57 @@ def compute_distance(point, points):
837859
# Else return None
838860
else:
839861
return None, None
862+
863+
864+
# Skeletonize example
865+
if __name__ == "__main__" :
866+
import os
867+
import numpy as np
868+
from mindboggle.utils.io_vtk import load_scalar, rewrite_scalars
869+
from mindboggle.utils.mesh_operations import find_neighbors
870+
from mindboggle.extract.extract_fundi import compute_likelihood, connect_points
871+
from mindboggle.utils.mesh_operations import find_anchors, skeletonize, extract_endpoints
872+
data_path = os.environ['MINDBOGGLE_DATA']
873+
depth_file = os.path.join(data_path, 'measures',
874+
'_hemi_lh_subject_MMRR-21-1', 'lh.pial.depth.vtk')
875+
mean_curvature_file = os.path.join(data_path, 'measures',
876+
'_hemi_lh_subject_MMRR-21-1', 'lh.pial.curv.avg.vtk')
877+
min_curvature_vector_file = os.path.join(data_path, 'measures',
878+
'_hemi_lh_subject_MMRR-21-1', 'lh.pial.curv.min.dir.txt')
879+
sulci_file = os.path.join(data_path, 'results', 'features',
880+
'_hemi_lh_subject_MMRR-21-1', 'sulci.vtk')
881+
points, faces, depths, n_vertices = load_scalar(depth_file, True)
882+
neighbor_lists = find_neighbors(faces, len(points))
883+
points, faces, sulcus_IDs, n_vertices = load_scalar(sulci_file, True)
884+
points, faces, mean_curvatures, n_vertices = load_scalar(mean_curvature_file,
885+
True)
886+
min_directions = np.loadtxt(min_curvature_vector_file)
887+
sulcus_ID = 1
888+
indices_fold = [i for i,x in enumerate(sulcus_IDs) if x == sulcus_ID]
889+
fold_likelihoods = compute_likelihood(depths[indices_fold],
890+
mean_curvatures[indices_fold])
891+
thr = 0.5
892+
L = np.zeros(len(points))
893+
L[indices_fold] = fold_likelihoods
894+
fold_indices_anchors = find_anchors(points[indices_fold, :],
895+
fold_likelihoods, min_directions[indices_fold], 5, thr)
896+
indices_anchors = [indices_fold[x] for x in fold_indices_anchors]
897+
L[L > thr] = 1
898+
L[L <= thr] = 0
899+
anchor_skeleton = skeletonize(L, indices_anchors, neighbor_lists)
900+
indices_skeleton = [i for i,x in enumerate(anchor_skeleton) if x > 0]
901+
indices_endpoints = extract_endpoints(indices_skeleton, neighbor_lists)
902+
indices_endpoints = [x for x in indices_endpoints if x in indices_anchors]
903+
skeleton = skeletonize(L, indices_endpoints, neighbor_lists)
904+
905+
print("Fold: {0}, L: {1}, skeleton: {2}, anchors: {3}, endpoints: {4}".
906+
format(len([x for x in sulcus_IDs if x == sulcus_ID]),
907+
len([x for x in L if x > 0]),
908+
len([x for x in skeleton if x > 0]),
909+
len(indices_anchors), len(indices_endpoints)))
910+
911+
# Write results to vtk file and view with mayavi2:
912+
skeleton[indices_anchors] = 3
913+
skeleton[indices_endpoints] = 4
914+
rewrite_scalars(depth_file, 'test_skeletonize.vtk', skeleton, skeleton)
915+
os.system('mayavi2 -m Surface -d test_skeletonize.vtk &')

0 commit comments

Comments
 (0)