The open source OpenXR runtime
0
fork

Configure Feed

Select the types of activity you want to include in your feed.

t/hand/async: Add simple pose-prediction

+90 -4
+90 -4
src/xrt/tracking/hand/t_hand_tracking_async.c
··· 11 11 #include "util/u_misc.h" 12 12 #include "util/u_trace_marker.h" 13 13 #include "util/u_logging.h" 14 + #include "util/u_var.h" 14 15 #include "os/os_threading.h" 16 + 17 + #include "math/m_space.h" 18 + #include "math/m_relation_history.h" 15 19 16 20 17 21 //!@todo Definitely needs a destroy function, will leak a ton. ··· 24 28 25 29 struct xrt_frame *frames[2]; 26 30 31 + bool use_prediction; 32 + struct u_var_draggable_f32 prediction_offset_ms; 33 + 27 34 struct 28 35 { 29 36 struct xrt_hand_joint_set hands[2]; ··· 34 41 { 35 42 struct os_mutex mutex; 36 43 struct xrt_hand_joint_set hands[2]; 44 + struct m_relation_history *relation_hist[2]; 37 45 uint64_t timestamp; 38 46 } present; 39 47 ··· 84 92 xrt_frame_reference(&hta->frames[1], NULL); 85 93 os_mutex_lock(&hta->present.mutex); 86 94 hta->present.timestamp = hta->working.timestamp; 87 - hta->present.hands[0] = hta->working.hands[0]; 88 - hta->present.hands[1] = hta->working.hands[1]; 95 + for (int i = 0; i < 2; i++) { 96 + hta->present.hands[i] = hta->working.hands[i]; 97 + 98 + struct xrt_space_relation wrist_rel = 99 + hta->working.hands[i].values.hand_joint_set_default[XRT_HAND_JOINT_WRIST].relation; 100 + 101 + m_relation_history_estimate_motion(hta->present.relation_hist[i], // 102 + &wrist_rel, // 103 + hta->working.timestamp, // 104 + &wrist_rel); 105 + m_relation_history_push(hta->present.relation_hist[i], &wrist_rel, hta->working.timestamp); 106 + } 89 107 os_mutex_unlock(&hta->present.mutex); 90 108 91 109 hta->hand_tracking_work_active = false; ··· 145 163 if (name == XRT_INPUT_GENERIC_HAND_TRACKING_RIGHT) { 146 164 idx = 1; 147 165 } 148 - *out_value = hta->present.hands[idx]; 149 - *out_timestamp_ns = hta->present.timestamp; 166 + 167 + os_mutex_lock(&hta->present.mutex); 168 + 169 + struct xrt_hand_joint_set latest_hand = hta->present.hands[idx]; 170 + 171 + if (!hta->use_prediction) { 172 + *out_value = latest_hand; 173 + *out_timestamp_ns = hta->present.timestamp; 174 + 
os_mutex_unlock(&hta->present.mutex); 175 + return; 176 + } 177 + 178 + double prediction_offset_ns = (double)hta->prediction_offset_ms.val * (double)U_TIME_1MS_IN_NS; 179 + 180 + desired_timestamp_ns += (uint64_t)prediction_offset_ns; 181 + 182 + struct xrt_space_relation predicted_wrist; 183 + m_relation_history_get(hta->present.relation_hist[idx], desired_timestamp_ns, &predicted_wrist); 184 + 185 + os_mutex_unlock(&hta->present.mutex); 186 + 187 + struct xrt_space_relation latest_wrist = 188 + latest_hand.values.hand_joint_set_default[XRT_HAND_JOINT_WRIST].relation; 189 + 190 + *out_value = latest_hand; 191 + 192 + // apply the pose change from the latest wrist to the predicted wrist 193 + // to all the joints on the hand. 194 + 195 + //!@optimize We could slightly reduce the total number of transforms by putting some of this in 196 + //! ht_async_mainloop 197 + for (int i = 0; i < XRT_HAND_JOINT_COUNT; i++) { 198 + struct xrt_relation_chain xrc = {0}; 199 + m_relation_chain_push_relation(&xrc, &latest_hand.values.hand_joint_set_default[i].relation); 200 + m_relation_chain_push_inverted_relation(&xrc, &latest_wrist); 201 + m_relation_chain_push_relation(&xrc, &predicted_wrist); 202 + m_relation_chain_resolve(&xrc, &out_value->values.hand_joint_set_default[i].relation); 203 + } 204 + 205 + *out_timestamp_ns = desired_timestamp_ns; 150 206 } 151 207 152 208 void ··· 165 221 166 222 t_ht_sync_destroy(&hta->provider); 167 223 224 + for (int i = 0; i < 2; i++) { 225 + m_relation_history_destroy(&hta->present.relation_hist[i]); 226 + } 227 + 168 228 free(hta); 169 229 } 170 230 ··· 181 241 hta->base.get_hand = ht_async_get_hand; 182 242 183 243 hta->provider = sync; 244 + 245 + for (int i = 0; i < 2; i++) { 246 + m_relation_history_create(&hta->present.relation_hist[i]); 247 + } 248 + 249 + u_var_add_root(hta, "Hand-tracking async shim!", 0); 250 + 251 + //!@todo We came up with this value just by seeing what worked - with Index and WMR, we'd be around 40ms late by 
252 + //! the time the camera frames arrived and were processed. 253 + 254 + // We _really_ need a way to calibrate this live - something like an exponential filter that looks at the 255 + // typical maximum time between the time at which we were asked for a sample and the timestamp of the most 256 + // recently processed sample.