
Commit 886997a

Refactor the eye tracking logic into EyeTracking, so that EyeTrackingDemo is only a wrapper for command purposes
1 parent e6c8612 commit 886997a
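
Only the new EyeTracking class is reproduced below; the companion change that reduces EyeTrackingDemo to a thin command wrapper is part of this commit but is not shown on this page. As a hypothetical sketch (not the actual file contents), such a wrapper might simply delegate to the refactored class, assuming SciJava's Command plugin machinery and that CellTrackingBase exposes a settable sciview reference:

import org.scijava.command.Command
import org.scijava.plugin.Parameter
import org.scijava.plugin.Plugin
import sc.iview.SciView

// Hypothetical sketch only: the actual EyeTrackingDemo changes are not reproduced on this page.
@Plugin(type = Command::class)
class EyeTrackingDemo : Command {

    @Parameter
    private lateinit var sciview: SciView

    override fun run() {
        // All eye tracking logic now lives in EyeTracking; the command only wires it up and starts it.
        val eyeTracking = EyeTracking()
        eyeTracking.sciview = sciview // assumption: CellTrackingBase exposes a settable sciview field
        eyeTracking.run()
    }
}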


2 files changed: +281 −235 lines

Lines changed: 272 additions & 0 deletions
@@ -0,0 +1,272 @@
package sc.iview.commands.demo.advanced

import graphics.scenery.*
import graphics.scenery.attribute.material.Material
import graphics.scenery.controls.OpenVRHMD
import graphics.scenery.controls.TrackedDeviceType
import graphics.scenery.controls.eyetracking.PupilEyeTracker
import graphics.scenery.primitives.Cylinder
import graphics.scenery.primitives.TextBoard
import graphics.scenery.textures.Texture
import graphics.scenery.utils.SystemHelpers
import graphics.scenery.utils.extensions.minus
import graphics.scenery.utils.extensions.xyz
import graphics.scenery.utils.extensions.xyzw
import graphics.scenery.volumes.Volume
import net.imglib2.type.numeric.integer.UnsignedByteType
import org.joml.*
import org.scijava.ui.behaviour.ClickBehaviour
import java.awt.image.DataBufferByte
import java.io.ByteArrayInputStream
import java.nio.file.Files
import java.nio.file.Paths
import javax.imageio.ImageIO
import kotlin.concurrent.thread
import kotlin.math.PI

/**
 * Tracking class used for communicating with eye trackers, tracking cells with them in a sciview VR environment.
 * It calls the Hedgehog analysis on the eye tracking results and communicates the results to Mastodon via
 * [mastodonCallbackLinkCreate], which is called on every spine graph vertex that is extracted, and
 * [mastodonUpdateGraph] which is called after all vertices are iterated, giving Mastodon a chance to rebuild its tracks.
 */
class EyeTracking(
    override var mastodonCallbackLinkCreate: ((HedgehogAnalysis.SpineGraphVertex) -> Unit)? = null,
    override var mastodonUpdateGraph: (() -> Unit)? = null
): CellTrackingBase() {

    val pupilTracker = PupilEyeTracker(calibrationType = PupilEyeTracker.CalibrationType.WorldSpace, port = System.getProperty("PupilPort", "50020").toInt())

    val calibrationTarget = Icosphere(0.02f, 2)
    val laser = Cylinder(0.005f, 0.2f, 10)

    val confidenceThreshold = 0.60f

    // var currentVolume = 0

    fun run() {

        sciview.toggleVRRendering()
        logger.info("VR mode has been toggled")
        hmd = sciview.hub.getWorkingHMD() as? OpenVRHMD ?: throw IllegalStateException("Could not find headset")
        sessionId = "BionicTracking-generated-${SystemHelpers.formatDateTime()}"
        sessionDirectory = Files.createDirectory(Paths.get(System.getProperty("user.home"), "Desktop", sessionId))

        referenceTarget.visible = false
        referenceTarget.ifMaterial {
            roughness = 1.0f
            metallic = 0.0f
            diffuse = Vector3f(0.8f, 0.8f, 0.8f)
        }
        referenceTarget.name = "Reference Target"
        sciview.camera?.addChild(referenceTarget)

        calibrationTarget.visible = false
        calibrationTarget.material {
            roughness = 1.0f
            metallic = 0.0f
            diffuse = Vector3f(1.0f, 1.0f, 1.0f)
        }
        calibrationTarget.name = "Calibration Target"
        sciview.camera?.addChild(calibrationTarget)

        laser.visible = false
        laser.ifMaterial { diffuse = Vector3f(1.0f, 1.0f, 1.0f) }
        laser.name = "Laser"
        sciview.addNode(laser)

        val shell = Box(Vector3f(20.0f, 20.0f, 20.0f), insideNormals = true)
        shell.ifMaterial {
            cullingMode = Material.CullingMode.Front
            diffuse = Vector3f(0.4f, 0.4f, 0.4f)
        }

        shell.spatial().position = Vector3f(0.0f, 0.0f, 0.0f)
        shell.name = "Shell"
        sciview.addNode(shell)

        val volnodes = sciview.findNodes { node -> Volume::class.java.isAssignableFrom(node.javaClass) }

        val v = (volnodes.firstOrNull() as? Volume)
        if(v == null) {
            logger.warn("No volume found, bailing")
            return
        } else {
            logger.info("found ${volnodes.size} volume nodes. Using the first one: ${volnodes.first()}")
            volume = v
        }
        volume.visible = false

        val bb = BoundingGrid()
        bb.node = volume
        bb.visible = false

        sciview.addNode(hedgehogs)

        val eyeFrames = Mesh("eyeFrames")
        val left = Box(Vector3f(1.0f, 1.0f, 0.001f))
        val right = Box(Vector3f(1.0f, 1.0f, 0.001f))
        left.spatial().position = Vector3f(-1.0f, 1.5f, 0.0f)
        left.spatial().rotation = left.spatial().rotation.rotationZ(PI.toFloat())
        right.spatial().position = Vector3f(1.0f, 1.5f, 0.0f)
        eyeFrames.addChild(left)
        eyeFrames.addChild(right)

        sciview.addNode(eyeFrames)

        val pupilFrameLimit = 20
        var lastFrame = System.nanoTime()

        pupilTracker.subscribeFrames { eye, texture ->
            if(System.nanoTime() - lastFrame < pupilFrameLimit*10e5) {
                return@subscribeFrames
            }

            val node = if(eye == 1) {
                left
            } else {
                right
            }

            val stream = ByteArrayInputStream(texture)
            val image = ImageIO.read(stream)
            val data = (image.raster.dataBuffer as DataBufferByte).data

            node.ifMaterial {
                textures["diffuse"] = Texture(
                    Vector3i(image.width, image.height, 1),
                    3,
                    UnsignedByteType(),
                    BufferUtils.allocateByteAndPut(data)
                )
            }

            lastFrame = System.nanoTime()
        }

        // TODO: Replace with cam.showMessage()
        val debugBoard = TextBoard()
        debugBoard.name = "debugBoard"
        debugBoard.scale = Vector3f(0.05f, 0.05f, 0.05f)
        debugBoard.position = Vector3f(0.0f, -0.3f, -0.9f)
        debugBoard.text = ""
        debugBoard.visible = false
        sciview.camera?.addChild(debugBoard)

        val lights = Light.createLightTetrahedron<PointLight>(Vector3f(0.0f, 0.0f, 0.0f), spread = 5.0f, radius = 15.0f, intensity = 5.0f)
        lights.forEach { sciview.addNode(it) }

        thread {
            logger.info("Adding onDeviceConnect handlers")
            hmd.events.onDeviceConnect.add { hmd, device, timestamp ->
                logger.info("onDeviceConnect called, cam=${sciview.camera}")
                if(device.type == TrackedDeviceType.Controller) {
                    logger.info("Got device ${device.name} at $timestamp")
                    device.model?.let { hmd.attachToNode(device, it, sciview.camera) }
                }
            }
        }
        thread {
            logger.info("started thread for inputSetup")
            inputSetup()
            setupCalibration()
        }

        launchHedgehogThread()
    }

    private fun setupCalibration(keybindingCalibration: String = "N", keybindingTracking: String = "U") {
        val startCalibration = ClickBehaviour { _, _ ->
            thread {
                val cam = sciview.camera as? DetachedHeadCamera ?: return@thread
                pupilTracker.gazeConfidenceThreshold = confidenceThreshold
                if (!pupilTracker.isCalibrated) {
                    logger.info("pupil is currently uncalibrated")
                    pupilTracker.onCalibrationInProgress = {
                        cam.showMessage("Crunching equations ...", distance = 2f, size = 0.2f, messageColor = Vector4f(1.0f, 0.8f, 0.0f, 1.0f), duration = 15000, centered = true)
                    }

                    pupilTracker.onCalibrationFailed = {
                        cam.showMessage("Calibration failed.", distance = 2f, size = 0.2f, messageColor = Vector4f(1.0f, 0.0f, 0.0f, 1.0f), centered = true)
                    }

                    pupilTracker.onCalibrationSuccess = {
                        cam.showMessage("Calibration succeeded!", distance = 2f, size = 0.2f, messageColor = Vector4f(0.0f, 1.0f, 0.0f, 1.0f), centered = true)
                        // cam.children.find { it.name == "debugBoard" }?.visible = true

                        for (i in 0 until 20) {
                            referenceTarget.ifMaterial { diffuse = Vector3f(0.0f, 1.0f, 0.0f) }
                            Thread.sleep(100)
                            referenceTarget.ifMaterial { diffuse = Vector3f(0.8f, 0.8f, 0.8f) }
                            Thread.sleep(30)
                        }

                        hmd.removeBehaviour("start_calibration")
                        hmd.removeKeyBinding("start_calibration")

                        val toggleTracking = ClickBehaviour { _, _ ->
                            if (tracking) {
                                logger.info("deactivating tracking...")
                                referenceTarget.ifMaterial { diffuse = Vector3f(0.5f, 0.5f, 0.5f) }
                                cam.showMessage("Tracking deactivated.", distance = 2f, size = 0.2f, centered = true)
                                dumpHedgehog()
                            } else {
                                logger.info("activating tracking...")
                                addHedgehog()
                                referenceTarget.ifMaterial { diffuse = Vector3f(1.0f, 0.0f, 0.0f) }
                                cam.showMessage("Tracking active.", distance = 2f, size = 0.2f, centered = true)
                            }
                            tracking = !tracking
                        }
                        hmd.addBehaviour("toggle_tracking", toggleTracking)
                        hmd.addKeyBinding("toggle_tracking", keybindingTracking)

                        volume.visible = true
                        // volume.runRecursive { it.visible = true }
                        playing = true
                    }

                    pupilTracker.unsubscribeFrames()
                    sciview.deleteNode(sciview.find("eyeFrames"))

                    logger.info("Starting eye tracker calibration")
                    cam.showMessage("Follow the white rabbit.", distance = 2f, size = 0.2f, duration = 1500, centered = true)
                    pupilTracker.calibrate(cam, hmd,
                        generateReferenceData = true,
                        calibrationTarget = calibrationTarget)

                    pupilTracker.onGazeReceived = when (pupilTracker.calibrationType) {
                        //NEW
                        PupilEyeTracker.CalibrationType.WorldSpace -> { gaze ->
                            if (gaze.confidence > confidenceThreshold) {
                                val p = gaze.gazePoint()
                                referenceTarget.visible = true
                                // Pupil has mm units, so we divide by 1000 here to get to scenery units
                                referenceTarget.spatial().position = p
                                (cam.children.find { it.name == "debugBoard" } as? TextBoard)?.text = "${String.format("%.2f", p.x())}, ${String.format("%.2f", p.y())}, ${String.format("%.2f", p.z())}"

                                val headCenter = cam.spatial().viewportToWorld(Vector2f(0.0f, 0.0f))
                                val pointWorld = Matrix4f(cam.spatial().world).transform(p.xyzw()).xyz()
                                val direction = (pointWorld - headCenter).normalize()

                                if (tracking) {
                                    // log.info("Starting spine from $headCenter to $pointWorld")
                                    addSpine(headCenter, direction, volume, gaze.confidence, volume.viewerState.currentTimepoint)
                                }
                            }
                        }

                        // else -> {gaze-> }
                    }

                    logger.info("Calibration routine done.")
                }

                // bind calibration start to menu key on controller

            }
        }
        hmd.addBehaviour("start_calibration", startCalibration)
        hmd.addKeyBinding("start_calibration", keybindingCalibration)
    }

}
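
As the class documentation above describes, the Mastodon callbacks are plain constructor parameters, so a caller can wire them up directly. A minimal usage sketch, hypothetical and based only on the constructor and run() shown above:

// Hypothetical usage sketch, based only on the constructor shown above.
val eyeTracking = EyeTracking(
    mastodonCallbackLinkCreate = { vertex ->
        // Invoked for every spine graph vertex extracted by the Hedgehog analysis;
        // a Mastodon integration would create the corresponding spot/link here.
        println("New spine graph vertex: $vertex")
    },
    mastodonUpdateGraph = {
        // Invoked once all vertices have been handed over, so Mastodon can rebuild its tracks.
        println("Rebuilding Mastodon tracks")
    }
)
eyeTracking.run()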
