PageRenderTime 177ms CodeModel.GetById 22ms RepoModel.GetById 1ms app.codeStats 0ms

/vrpn_Tracker_ViewPoint.C

https://gitlab.com/sat-metalab/vrpn
C | 225 lines | 98 code | 40 blank | 87 comment | 15 complexity | fab9bd8120e86d5f12ab0ef563c8d924 MD5 | raw file
  1. ///////////////////////////////////////////////////////////////////////////////////////////////
  2. //
  3. // Name: vrpn_Tracker_ViewPoint.C
  4. //
  5. // Author: David Borland
  6. //
  7. // EventLab at the University of Barcelona
  8. //
  9. // Description: VRPN server class for Arrington Research ViewPoint EyeTracker.
  10. //
  11. // The VRPN server connects to the eye tracker using the VPX_InterApp DLL.
  12. // Whatever other control software is being used to connect to the eye tracker
  13. // (e.g. the ViewPoint software that comes with the tracker) to perform
  14. // calibration, etc. should link to the same copy of the DLL, so they can share
  15. // information.
  16. //
  17. // -------------------------------------------------------------------------------
  18. //
  19. // Tracker:
  20. //
  21. // The tracker has two sensors, as the ViewPoint can optionally have binocular
  22. // tracking. In the case of monocular tracking, only sensor 0 (EYE_A) will have
  23. // valid information. Retrieving smoothed or raw tracking data is controlled by
  24. // the smoothedData parameter.
  25. //
  26. // Position: The (x,y) gaze point in gaze space (smoothed or raw).
  27. //
  28. // Rotation: The (x,y) gaze angle as a quaternion (smoothed or raw).
  29. //
  30. // Velocity: The x- and y- components of the eye movement velocity in gaze space
  31. // (always smoothed).
  32. //
  33. // -------------------------------------------------------------------------------
  34. //
  35. // Analog:
  36. //
  37. // There are a lot of additional data that can be retrieved from the tracker.
  38. // These values are always calculated from the smoothed gaze point. Currently,
  39. // the following are sent as analog values, but more can be added as needed.
  40. // Please see the ViewPoint documentation regarding what other data are available.
  41. //
  42. // Because each channel needs to be duplicated in the case of a binocular tracker,
  43. // the first n/2 values are for EYE_A, and the second n/2 values are for EYE_B.
  44. //
  45. // EYE_A:
  46. //
  47. // Channel 0: The pupil aspect ratio, from 0.0 to 1.0. Can be used to detect
  48. // blinks when it falls below a given threshold.
  49. //
  50. // Channel 1: The total velocity (magnitude of eye movement velocity). Can be
  51. // used to detect saccades.
  52. //
  53. // Channel 2: The fixation seconds (length of time below the velocity criterion
  54. // used to detect saccades). 0 if saccade is occurring.
  55. //
  56. // EYE_B:
  57. //
  58. // Channels 3-5: See EYE_A.
  59. //
  60. ///////////////////////////////////////////////////////////////////////////////////////////////
  61. #include "vrpn_Tracker_ViewPoint.h"
  62. #ifdef VRPN_USE_VIEWPOINT
  63. #include VRPN_VIEWPOINT_H
  64. #include "quat.h"
  65. vrpn_Tracker_ViewPoint::vrpn_Tracker_ViewPoint(const char* name, vrpn_Connection* c, bool smoothedData) :
  66. vrpn_Tracker(name, c), vrpn_Analog(name, c), useSmoothedData(smoothedData)
  67. {
  68. // Check the DLL version
  69. double version = VPX_GetDLLVersion();
  70. if (VPX_VersionMismatch(VPX_SDK_VERSION)) {
  71. fprintf(stderr, "vrpn_Tracker_ViewPoint::vrpn_Tracker_ViewPoint(): Warning, SDK version is %g, while DLL version is %g \n", version, VPX_SDK_VERSION);
  72. }
  73. else {
  74. printf("vrpn_Tracker_ViewPoint::vrpn_Tracker_ViewPoint(): SDK version %g matches DLL version %g \n", version, VPX_SDK_VERSION);
  75. }
  76. // Two sensors, one for each eye
  77. vrpn_Tracker::num_sensors = 2;
  78. // Currently 3 analog channels per eye
  79. const int channels_per_eye = 3;
  80. // Total number of channels is two times the number of channels per eye
  81. vrpn_Analog::num_channel = channels_per_eye * 2;
  82. // VRPN stuff
  83. register_server_handlers();
  84. }
// Destructor. No device-specific teardown is performed here; the body is
// intentionally empty.
vrpn_Tracker_ViewPoint::~vrpn_Tracker_ViewPoint()
{
}
  88. void vrpn_Tracker_ViewPoint::mainloop()
  89. {
  90. // Call the server mainloop
  91. server_mainloop();
  92. // Get data from the DLL
  93. get_report();
  94. }
  95. void vrpn_Tracker_ViewPoint::get_report()
  96. {
  97. // Get a time stamp
  98. struct timeval current_time;
  99. vrpn_gettimeofday(&current_time, NULL);
  100. // Set the time stamp for each device type
  101. vrpn_Tracker::timestamp = current_time;
  102. vrpn_Analog::timestamp = current_time;
  103. // Get tracker and analog data
  104. get_tracker();
  105. get_analog();
  106. }
  107. void vrpn_Tracker_ViewPoint::get_tracker()
  108. {
  109. // Get information for each eye
  110. for (int i = 0; i < 2; i++) {
  111. // The sensor
  112. d_sensor = i;
  113. // Which eye?
  114. VPX_EyeType eye;
  115. if (d_sensor == 0) eye = EYE_A;
  116. else eye = EYE_B;
  117. // Get tracker data from the DLL
  118. VPX_RealPoint gp, cv, ga;
  119. if (useSmoothedData) {
  120. // Use smoothed data, when available
  121. VPX_GetGazePointSmoothed2(eye, &gp);
  122. VPX_GetComponentVelocity2(eye, &cv); // Always smoothed
  123. VPX_GetGazeAngleSmoothed2(eye, &ga);
  124. }
  125. else {
  126. // Use raw data
  127. VPX_GetGazePoint2(eye, &gp);
  128. VPX_GetComponentVelocity2(eye, &cv); // Always smoothed
  129. VPX_GetGazeAngle2(eye, &ga);
  130. }
  131. // Set the tracker position from the gaze point
  132. pos[0] = gp.x;
  133. pos[1] = gp.y;
  134. pos[2] = 0.0;
  135. // Set the tracker velocity from the eye velocity
  136. vel[0] = cv.x;
  137. vel[1] = cv.y;
  138. vel[2] = 0.0;
  139. // Convert the gaze angle to a quaternion
  140. q_from_euler(d_quat, 0.0, Q_DEG_TO_RAD(ga.y), Q_DEG_TO_RAD(ga.x));
  141. // Send the data for this eye
  142. send_report();
  143. }
  144. }
  145. void vrpn_Tracker_ViewPoint::get_analog()
  146. {
  147. // Get information for each eye
  148. for (int i = 0; i < 2; i++) {
  149. // Which eye?
  150. VPX_EyeType eye;
  151. if (i == 0) eye = EYE_A;
  152. else eye = EYE_B;
  153. // Analog channel index offset for second eye
  154. unsigned int eyeOffset = i * vrpn_Analog::num_channel / 2;
  155. // Get analog information from the DLL
  156. double ar, tv, fs;
  157. VPX_GetPupilAspectRatio2(eye, &ar);
  158. VPX_GetTotalVelocity2(eye, &tv);
  159. VPX_GetFixationSeconds2(eye, &fs);
  160. // Set the analog channels
  161. channel[0 + eyeOffset] = ar;
  162. channel[1 + eyeOffset] = tv;
  163. channel[2 + eyeOffset] = fs;
  164. }
  165. // Send all analog data
  166. vrpn_Analog::report_changes();
  167. }
  168. void vrpn_Tracker_ViewPoint::send_report()
  169. {
  170. // Send tracker data
  171. if (d_connection) {
  172. char msgbuf[1000];
  173. int len = vrpn_Tracker::encode_to(msgbuf);
  174. if (d_connection->pack_message(len, vrpn_Tracker::timestamp, position_m_id, d_sender_id, msgbuf,
  175. vrpn_CONNECTION_LOW_LATENCY)) {
  176. fprintf(stderr,"vrpn_Tracker_ViewPoint: cannot write message: tossing\n");
  177. }
  178. len = vrpn_Tracker::encode_vel_to(msgbuf);
  179. if (d_connection->pack_message(len, vrpn_Tracker::timestamp, velocity_m_id, d_sender_id, msgbuf,
  180. vrpn_CONNECTION_LOW_LATENCY)) {
  181. fprintf(stderr,"vrpn_Tracker_ViewPoint: cannot write message: tossing\n");
  182. }
  183. }
  184. }
  185. #endif