// Monado - The open source OpenXR runtime
1/* Copyright 2021, Jan Schmidt
2 * SPDX-License-Identifier: BSL-1.0
3 */
4/*!
5 * @file
6 * @brief Driver code to read WMR config blocks
7 * @author Jan Schmidt <jan@centricular.com>
8 * @ingroup drv_wmr
9 */
10
11#include "math/m_api.h"
12
13#include "util/u_debug.h"
14#include "util/u_misc.h"
15#include "util/u_json.h"
16
17#include "wmr_config.h"
18
19#include <assert.h>
20#include <string.h>
21
22
23#define WMR_TRACE(log_level, ...) U_LOG_IFL_T(log_level, __VA_ARGS__)
24#define WMR_DEBUG(log_level, ...) U_LOG_IFL_D(log_level, __VA_ARGS__)
25#define WMR_INFO(log_level, ...) U_LOG_IFL_I(log_level, __VA_ARGS__)
26#define WMR_WARN(log_level, ...) U_LOG_IFL_W(log_level, __VA_ARGS__)
27#define WMR_ERROR(log_level, ...) U_LOG_IFL_E(log_level, __VA_ARGS__)
28
29#define JSON_INT(a, b, c) u_json_get_int(u_json_get(a, b), c)
30#define JSON_FLOAT(a, b, c) u_json_get_float(u_json_get(a, b), c)
31#define JSON_DOUBLE(a, b, c) u_json_get_double(u_json_get(a, b), c)
32#define JSON_VEC3(a, b, c) u_json_get_vec3_array(u_json_get(a, b), c)
33#define JSON_MATRIX_3X3(a, b, c) u_json_get_matrix_3x3(u_json_get(a, b), c)
34#define JSON_STRING(a, b, c) u_json_get_string_into_array(u_json_get(a, b), c, sizeof(c))
35
36//! Specifies the maximum number of cameras to use for SLAM tracking
37DEBUG_GET_ONCE_NUM_OPTION(wmr_max_slam_cams, "WMR_MAX_SLAM_CAMS", WMR_MAX_CAMERAS)
38
39static void
40wmr_hmd_config_init_defaults(struct wmr_hmd_config *c)
41{
42 memset(c, 0, sizeof(struct wmr_hmd_config));
43
44 // initialize default sensor transforms
45 math_pose_identity(&c->eye_params[0].pose);
46 math_pose_identity(&c->eye_params[1].pose);
47
48 math_pose_identity(&c->sensors.accel.pose);
49 math_pose_identity(&c->sensors.gyro.pose);
50 math_pose_identity(&c->sensors.mag.pose);
51
52 math_matrix_3x3_identity(&c->sensors.accel.mix_matrix);
53 math_matrix_3x3_identity(&c->sensors.gyro.mix_matrix);
54 math_matrix_3x3_identity(&c->sensors.mag.mix_matrix);
55}
56
57static struct xrt_pose
58pose_from_rt(const struct xrt_matrix_3x3 rotation_rm, const struct xrt_vec3 translation)
59{
60 struct xrt_matrix_3x3 rotation_cm;
61 math_matrix_3x3_transpose(&rotation_rm, &rotation_cm);
62
63 struct xrt_matrix_4x4 mat = {0};
64 math_matrix_4x4_isometry_from_rt(&rotation_cm, &translation, &mat);
65
66 struct xrt_pose pose;
67 math_pose_from_isometry(&mat, &pose);
68
69 return pose;
70}
71
72static bool
73wmr_config_parse_display(struct wmr_hmd_config *c, cJSON *display, enum u_logging_level log_level)
74{
75 cJSON *json_eye = cJSON_GetObjectItem(display, "AssignedEye");
76 char *json_eye_name = cJSON_GetStringValue(json_eye);
77
78 if (json_eye_name == NULL) {
79 WMR_ERROR(log_level, "Invalid/missing eye assignment block");
80 return false;
81 }
82
83 struct wmr_distortion_eye_config *eye = NULL;
84 if (!strcmp(json_eye_name, "CALIBRATION_DisplayEyeLeft")) {
85 eye = &c->eye_params[0];
86 } else if (!strcmp(json_eye_name, "CALIBRATION_DisplayEyeRight")) {
87 eye = &c->eye_params[1];
88 } else {
89 WMR_ERROR(log_level, "Unknown AssignedEye \"%s\"", json_eye_name);
90 return false;
91 }
92
93 struct xrt_matrix_3x3 affine_xform;
94
95 /* Extract display panel parameters */
96 cJSON *affine = cJSON_GetObjectItem(display, "Affine");
97 if (affine == NULL || u_json_get_float_array(affine, affine_xform.v, 9) != 9) {
98 WMR_ERROR(log_level, "Missing affine transform for AssignedEye \"%s\"", json_eye_name);
99 return false;
100 }
101
102 math_matrix_3x3_inverse(&affine_xform, &eye->poly_3k.inv_affine_xform);
103
104 if (!JSON_FLOAT(display, "DisplayWidth", &eye->display_size.x) ||
105 !JSON_FLOAT(display, "DisplayHeight", &eye->display_size.y))
106 return false;
107
108 const struct xrt_vec2_i32 display_size_px = {(int32_t)eye->display_size.x, (int32_t)eye->display_size.y};
109
110 cJSON *visible_area_center = cJSON_GetObjectItem(display, "VisibleAreaCenter");
111 if (visible_area_center == NULL || !JSON_FLOAT(visible_area_center, "X", &eye->visible_center.x) ||
112 !JSON_FLOAT(visible_area_center, "Y", &eye->visible_center.y)) {
113 return false;
114 }
115
116 if (!JSON_DOUBLE(display, "VisibleAreaRadius", &eye->visible_radius))
117 return false;
118
119 /* Compute eye pose */
120 cJSON *rt = cJSON_GetObjectItem(display, "Rt");
121 cJSON *rx = cJSON_GetObjectItem(rt, "Rotation");
122 if (rt == NULL || rx == NULL)
123 return false;
124
125 struct xrt_vec3 translation;
126 struct xrt_matrix_3x3 rotation;
127
128 if (!JSON_VEC3(rt, "Translation", &translation))
129 return false;
130
131 if (u_json_get_float_array(rx, rotation.v, 9) != 9)
132 return false;
133
134 eye->pose = pose_from_rt(rotation, translation);
135 eye->translation = translation;
136 eye->rotation = rotation;
137
138 /* Parse color distortion channels */
139 const char *channel_names[] = {"DistortionRed", "DistortionGreen", "DistortionBlue"};
140
141 for (int channel = 0; channel < 3; ++channel) {
142 struct u_poly_3k_eye_values *distortion3K = &eye->poly_3k;
143
144 cJSON *dist = cJSON_GetObjectItemCaseSensitive(display, channel_names[channel]);
145 if (!dist) {
146 WMR_ERROR(log_level, "Missing distortion channel info %s", channel_names[channel]);
147 return false;
148 }
149
150 const char *model_type = cJSON_GetStringValue(cJSON_GetObjectItemCaseSensitive(dist, "ModelType"));
151 if (model_type == NULL) {
152 WMR_ERROR(log_level, "Missing distortion type");
153 return false;
154 }
155
156 if (strcmp(model_type, "CALIBRATION_DisplayDistortionModelPolynomial3K") != 0) {
157 WMR_ERROR(log_level, "Unknown display distortion model %s", model_type);
158 return false;
159 }
160
161 int param_count;
162 double parameters[5];
163
164 if (!JSON_INT(dist, "ModelParameterCount", ¶m_count)) {
165 WMR_ERROR(log_level, "Missing distortion parameters");
166 return false;
167 }
168
169 cJSON *params_json = cJSON_GetObjectItemCaseSensitive(dist, "ModelParameters");
170 if (params_json == NULL ||
171 u_json_get_double_array(params_json, parameters, param_count) != (size_t)param_count) {
172 WMR_ERROR(log_level, "Missing distortion parameters");
173 return false;
174 }
175
176 distortion3K->channels[channel].eye_center.x = parameters[0];
177 distortion3K->channels[channel].eye_center.y = parameters[1];
178
179 distortion3K->channels[channel].display_size = display_size_px;
180
181 distortion3K->channels[channel].k[0] = parameters[2];
182 distortion3K->channels[channel].k[1] = parameters[3];
183 distortion3K->channels[channel].k[2] = parameters[4];
184 }
185
186 return true;
187}
188
/*!
 * Parse one inertial sensor (accel/gyro/mag) calibration block.
 *
 * Extracts the rigid "Rt" pose of the sensor plus the constant terms of the
 * temperature models for the mixing matrix and bias, the bias uncertainty
 * and the noise standard deviation.
 *
 * @return false (with a warning logged) if any required field is missing or
 *         malformed; @p c is then left partially written.
 */
static bool
wmr_inertial_sensor_config_parse(struct wmr_inertial_sensor_config *c, cJSON *sensor, enum u_logging_level log_level)
{
	struct xrt_vec3 translation;
	struct xrt_matrix_3x3 rotation;

	// cJSON_GetObjectItem() tolerates a NULL object (returns NULL), so
	// fetching "Rotation" before checking rt is safe.
	cJSON *rt = cJSON_GetObjectItem(sensor, "Rt");
	cJSON *rx = cJSON_GetObjectItem(rt, "Rotation");
	if (rt == NULL || rx == NULL) {
		WMR_WARN(log_level, "Missing Inertial Sensor calibration");
		return false;
	}

	if (!JSON_VEC3(rt, "Translation", &translation) || u_json_get_float_array(rx, rotation.v, 9) != 9) {
		WMR_WARN(log_level, "Invalid Inertial Sensor calibration");
		return false;
	}

	c->pose = pose_from_rt(rotation, translation);
	c->translation = translation;
	c->rotation = rotation;

	/* compute the bias offsets and mix matrix by taking the constant
	 * coefficients from the configuration */
	cJSON *mix_model = cJSON_GetObjectItem(sensor, "MixingMatrixTemperatureModel");
	cJSON *bias_model = cJSON_GetObjectItem(sensor, "BiasTemperatureModel");
	cJSON *bias_var = cJSON_GetObjectItem(sensor, "BiasUncertainty");
	cJSON *noise_std = cJSON_GetObjectItem(sensor, "Noise");

	// Raw value layouts as stored in the JSON (see uses below).
	float mix_model_values[3 * 3 * 4];
	float bias_model_values[12];
	float bias_var_values[3];
	float noise_std_values[3 * 2];

	if (mix_model == NULL || bias_model == NULL || noise_std == NULL || bias_var == NULL) {
		WMR_WARN(log_level, "Missing Inertial Sensor calibration");
		return false;
	}

	// Each of the 9 mixing-matrix entries has 4 temperature-model
	// coefficients; keep only the constant (first) coefficient of each.
	if (u_json_get_float_array(mix_model, mix_model_values, 3 * 3 * 4) != 3 * 3 * 4) {
		WMR_WARN(log_level, "Invalid Inertial Sensor calibration (invalid MixingMatrixTemperatureModel)");
		return false;
	}
	for (int i = 0; i < 9; i++) {
		c->mix_matrix.v[i] = mix_model_values[i * 4];
	}

	// Bias model: 3 axes x 4 coefficients; the constant terms sit at
	// indices 0, 4 and 8.
	if (u_json_get_float_array(bias_model, bias_model_values, 12) != 12) {
		WMR_WARN(log_level, "Invalid Inertial Sensor calibration (invalid BiasTemperatureModel)");
		return false;
	}
	c->bias_offsets.x = bias_model_values[0];
	c->bias_offsets.y = bias_model_values[4];
	c->bias_offsets.z = bias_model_values[8];

	// Per-axis bias uncertainty (variance).
	if (u_json_get_float_array(bias_var, bias_var_values, 3) != 3) {
		WMR_WARN(log_level, "Invalid Inertial Sensor calibration (invalid BiasUncertainty)");
		return false;
	}
	c->bias_var.x = bias_var_values[0];
	c->bias_var.y = bias_var_values[1];
	c->bias_var.z = bias_var_values[2];

	// "Noise" carries 6 values; only the first 3 are consumed here as the
	// per-axis standard deviation. NOTE(review): the meaning of the
	// remaining 3 values is not evident from this file — confirm upstream.
	if (u_json_get_float_array(noise_std, noise_std_values, 3 * 2) != 3 * 2) {
		WMR_WARN(log_level, "Invalid Inertial Sensor calibration (invalid Noise)");
		return false;
	}
	c->noise_std.x = noise_std_values[0];
	c->noise_std.y = noise_std_values[1];
	c->noise_std.z = noise_std_values[2];
	return true;
}
261
262static bool
263wmr_inertial_sensors_config_parse(struct wmr_inertial_sensors_config *c, cJSON *sensor, enum u_logging_level log_level)
264{
265 struct wmr_inertial_sensor_config *target = NULL;
266
267 const char *sensor_type = cJSON_GetStringValue(cJSON_GetObjectItem(sensor, "SensorType"));
268 if (sensor_type == NULL) {
269 WMR_WARN(log_level, "Missing sensor type");
270 return false;
271 }
272
273 if (!strcmp(sensor_type, "CALIBRATION_InertialSensorType_Gyro")) {
274 target = &c->gyro;
275 } else if (!strcmp(sensor_type, "CALIBRATION_InertialSensorType_Accelerometer")) {
276 target = &c->accel;
277 } else if (!strcmp(sensor_type, "CALIBRATION_InertialSensorType_Magnetometer")) {
278 target = &c->mag;
279 } else {
280 WMR_WARN(log_level, "Unhandled sensor type \"%s\"", sensor_type);
281 return false;
282 }
283
284 return wmr_inertial_sensor_config_parse(target, sensor, log_level);
285}
286
287static bool
288wmr_config_parse_camera_config(struct wmr_hmd_config *c, cJSON *camera, enum u_logging_level log_level)
289{
290 if (c->cam_count == WMR_MAX_CAMERAS) {
291 WMR_ERROR(log_level, "Too many camera entries. Enlarge WMR_MAX_CAMERAS");
292 return false;
293 }
294
295 struct wmr_camera_config *cam_config = c->cams + c->cam_count;
296
297 /* Camera purpose */
298 cJSON *json_purpose = cJSON_GetObjectItem(camera, "Purpose");
299 char *json_purpose_name = cJSON_GetStringValue(json_purpose);
300 if (json_purpose_name == NULL) {
301 WMR_ERROR(log_level, "Invalid camera calibration block %d - camera purpose not found", c->cam_count);
302 return false;
303 }
304
305 if (!strcmp(json_purpose_name, "CALIBRATION_CameraPurposeHeadTracking")) {
306 cam_config->purpose = WMR_CAMERA_PURPOSE_HEAD_TRACKING;
307 } else if (!strcmp(json_purpose_name, "CALIBRATION_CameraPurposeDisplayObserver")) {
308 cam_config->purpose = WMR_CAMERA_PURPOSE_DISPLAY_OBSERVER;
309 } else {
310 WMR_ERROR(log_level, "Unknown camera purpose: \"%s\" (camera %d)", json_purpose_name, c->cam_count);
311 return false;
312 }
313
314 cJSON *json_location = cJSON_GetObjectItem(camera, "Location");
315 char *json_location_name = cJSON_GetStringValue(json_location);
316 if (json_location_name == NULL) {
317 WMR_ERROR(log_level, "Invalid camera calibration block %d - location", c->cam_count);
318 return false;
319 }
320
321 if (!strcmp(json_location_name, "CALIBRATION_CameraLocationHT0")) {
322 cam_config->location = WMR_CAMERA_LOCATION_HT0;
323 } else if (!strcmp(json_location_name, "CALIBRATION_CameraLocationHT1")) {
324 cam_config->location = WMR_CAMERA_LOCATION_HT1;
325 } else if (!strcmp(json_location_name, "CALIBRATION_CameraLocationHT2")) {
326 cam_config->location = WMR_CAMERA_LOCATION_HT2;
327 } else if (!strcmp(json_location_name, "CALIBRATION_CameraLocationHT3")) {
328 cam_config->location = WMR_CAMERA_LOCATION_HT3;
329 } else if (!strcmp(json_location_name, "CALIBRATION_CameraLocationDO0")) {
330 cam_config->location = WMR_CAMERA_LOCATION_DO0;
331 } else if (!strcmp(json_location_name, "CALIBRATION_CameraLocationDO1")) {
332 cam_config->location = WMR_CAMERA_LOCATION_DO1;
333 } else {
334 WMR_ERROR(log_level, "Unknown camera location: \"%s\" (camera %d)", json_location_name, c->cam_count);
335 return false;
336 }
337
338 /* Camera pose */
339 struct xrt_vec3 translation;
340 struct xrt_matrix_3x3 rotation;
341
342 cJSON *rt = cJSON_GetObjectItem(camera, "Rt");
343 cJSON *rx = cJSON_GetObjectItem(rt, "Rotation");
344 if (rt == NULL || rx == NULL) {
345 WMR_ERROR(log_level, "Invalid camera calibration block %d - pose", c->cam_count);
346 return false;
347 }
348
349 if (!JSON_VEC3(rt, "Translation", &translation) || u_json_get_float_array(rx, rotation.v, 9) != 9) {
350 WMR_ERROR(log_level, "Invalid camera calibration block %d - pose", c->cam_count);
351 return false;
352 }
353
354 cam_config->pose = pose_from_rt(rotation, translation);
355 cam_config->translation = translation;
356 cam_config->rotation = rotation;
357
358 if (!JSON_INT(camera, "SensorWidth", &cam_config->roi.extent.w) ||
359 !JSON_INT(camera, "SensorHeight", &cam_config->roi.extent.h)) {
360 WMR_ERROR(log_level, "Invalid camera calibration block %d - sensor size", c->cam_count);
361 return false;
362 }
363 cam_config->roi.offset.w = c->tcam_count * cam_config->roi.extent.w; // Assume all tracking cams have same width
364 cam_config->roi.offset.h = 1; // Ignore first metadata row
365
366 /* Distortion information */
367 cJSON *dist = cJSON_GetObjectItemCaseSensitive(camera, "Intrinsics");
368 if (!dist) {
369 WMR_ERROR(log_level, "Invalid camera calibration block %d - distortion", c->cam_count);
370 return false;
371 }
372
373 const char *model_type = cJSON_GetStringValue(cJSON_GetObjectItemCaseSensitive(dist, "ModelType"));
374 if (model_type == NULL) {
375 WMR_ERROR(log_level, "Invalid camera calibration block %d - missing distortion type", c->cam_count);
376 return false;
377 }
378
379 if (!strcmp(model_type, "CALIBRATION_LensDistortionModelRational6KT")) {
380 } else {
381 WMR_ERROR(log_level, "Invalid camera calibration block %d - unknown distortion type %s", c->cam_count,
382 model_type);
383 return false;
384 }
385
386 struct wmr_distortion_6KT *distortion6KT = &cam_config->distortion6KT;
387
388 int param_count;
389 if (!JSON_INT(dist, "ModelParameterCount", ¶m_count)) {
390 WMR_ERROR(log_level, "Invalid camera calibration block %d - no ModelParameterCount", c->cam_count);
391 return false;
392 }
393
394 if (param_count != 15) {
395 WMR_ERROR(log_level, "Invalid camera calibration block %d - wrong ModelParameterCount %d", c->cam_count,
396 param_count);
397 return false;
398 }
399
400 cJSON *params_json = cJSON_GetObjectItemCaseSensitive(dist, "ModelParameters");
401 if (params_json == NULL ||
402 u_json_get_float_array(params_json, distortion6KT->v, param_count) != (size_t)param_count) {
403 WMR_ERROR(log_level, "Invalid camera calibration block %d - missing distortion parameters",
404 c->cam_count);
405 return false;
406 }
407
408 if (cam_config->purpose == WMR_CAMERA_PURPOSE_HEAD_TRACKING) {
409 c->tcams[c->tcam_count] = cam_config;
410 c->tcam_count++;
411 }
412
413 c->cam_count++;
414 return true;
415}
416
417static bool
418wmr_config_parse_calibration(struct wmr_hmd_config *c, cJSON *calib_info, enum u_logging_level log_level)
419{
420 cJSON *item = NULL;
421
422 // calib_info is object with keys "Cameras", "Displays", and "InertialSensors"
423 cJSON *displays = cJSON_GetObjectItemCaseSensitive(calib_info, "Displays");
424 if (!cJSON_IsArray(displays)) {
425 WMR_ERROR(log_level, "Displays: not found or not an Array");
426 return false;
427 }
428
429 cJSON_ArrayForEach(item, displays)
430 {
431 if (!wmr_config_parse_display(c, item, log_level)) {
432 WMR_ERROR(log_level, "Error parsing Display entry");
433 return false;
434 }
435 }
436
437 cJSON *sensors = cJSON_GetObjectItemCaseSensitive(calib_info, "InertialSensors");
438 if (!cJSON_IsArray(sensors)) {
439 WMR_ERROR(log_level, "InertialSensors: not found or not an Array");
440 return false;
441 }
442
443 cJSON_ArrayForEach(item, sensors)
444 {
445 if (!wmr_inertial_sensors_config_parse(&c->sensors, item, log_level)) {
446 WMR_WARN(log_level, "Error parsing InertialSensor entry");
447 }
448 }
449
450 cJSON *cameras = cJSON_GetObjectItemCaseSensitive(calib_info, "Cameras");
451 if (!cJSON_IsArray(cameras)) {
452 WMR_ERROR(log_level, "Cameras: not found or not an Array");
453 return false;
454 }
455
456 cJSON_ArrayForEach(item, cameras)
457 {
458 if (!wmr_config_parse_camera_config(c, item, log_level))
459 return false;
460 }
461 c->slam_cam_count = MIN(c->tcam_count, (int)debug_get_num_option_wmr_max_slam_cams());
462
463 return true;
464}
465
466static bool
467wmr_controller_led_config_parse(struct wmr_led_config *l,
468 int index,
469 const cJSON *led_json,
470 enum u_logging_level log_level)
471{
472 float tmp[3];
473
474 cJSON *pos = cJSON_GetObjectItem(led_json, "Position");
475 if (pos == NULL || u_json_get_float_array(pos, tmp, 3) != 3) {
476 WMR_ERROR(log_level, "Missing or invalid position for controller LED %d", index);
477 return false;
478 }
479 l->pos.x = tmp[0];
480 l->pos.y = tmp[1];
481 l->pos.z = tmp[2];
482
483 cJSON *norm = cJSON_GetObjectItem(led_json, "Normal");
484 if (norm == NULL || u_json_get_float_array(norm, tmp, 3) != 3) {
485 WMR_ERROR(log_level, "Missing or invalid normal for controller LED %d", index);
486 return false;
487 }
488 l->norm.x = tmp[0];
489 l->norm.y = tmp[1];
490 l->norm.z = tmp[2];
491
492 return true;
493}
494
495bool
496wmr_hmd_config_parse(struct wmr_hmd_config *c, char *json_string, enum u_logging_level log_level)
497{
498 wmr_hmd_config_init_defaults(c);
499
500 cJSON *json_root = cJSON_Parse(json_string);
501 if (!cJSON_IsObject(json_root)) {
502 WMR_ERROR(log_level, "Could not parse JSON data.");
503 cJSON_Delete(json_root);
504 return false;
505 }
506
507 cJSON *calib_info = cJSON_GetObjectItemCaseSensitive(json_root, "CalibrationInformation");
508 if (!cJSON_IsObject(calib_info)) {
509 WMR_ERROR(log_level, "CalibrationInformation object not found");
510 cJSON_Delete(json_root);
511 return false;
512 }
513
514 bool res = wmr_config_parse_calibration(c, calib_info, log_level);
515
516 cJSON_Delete(json_root);
517 return res;
518}
519
520static void
521wmr_controller_config_init_defaults(struct wmr_controller_config *c)
522{
523 memset(c, 0, sizeof(struct wmr_controller_config));
524
525 // initialize default sensor transforms
526 math_pose_identity(&c->sensors.accel.pose);
527 math_pose_identity(&c->sensors.gyro.pose);
528 math_pose_identity(&c->sensors.mag.pose);
529
530 math_matrix_3x3_identity(&c->sensors.accel.mix_matrix);
531 math_matrix_3x3_identity(&c->sensors.gyro.mix_matrix);
532 math_matrix_3x3_identity(&c->sensors.mag.mix_matrix);
533}
534
535bool
536wmr_controller_config_parse(struct wmr_controller_config *c, char *json_string, enum u_logging_level log_level)
537{
538 cJSON *item = NULL;
539
540 wmr_controller_config_init_defaults(c);
541
542 cJSON *json_root = cJSON_Parse(json_string);
543 if (!cJSON_IsObject(json_root)) {
544 WMR_ERROR(log_level, "Could not parse JSON data.");
545 cJSON_Delete(json_root);
546 return false;
547 }
548
549 cJSON *calib_info = cJSON_GetObjectItemCaseSensitive(json_root, "CalibrationInformation");
550 if (!cJSON_IsObject(calib_info)) {
551 WMR_ERROR(log_level, "CalibrationInformation object not found");
552 cJSON_Delete(json_root);
553 return false;
554 }
555
556 cJSON *sensors = cJSON_GetObjectItemCaseSensitive(calib_info, "InertialSensors");
557 if (!cJSON_IsArray(sensors)) {
558 WMR_ERROR(log_level, "InertialSensors: not found or not an Array");
559 return false;
560 }
561
562 cJSON_ArrayForEach(item, sensors)
563 {
564 if (!wmr_inertial_sensors_config_parse(&c->sensors, item, log_level)) {
565 WMR_WARN(log_level, "Error parsing InertialSensor entry");
566 }
567 }
568
569 cJSON *leds = cJSON_GetObjectItemCaseSensitive(calib_info, "ControllerLeds");
570 if (!cJSON_IsArray(leds)) {
571 WMR_ERROR(log_level, "ControllerLeds: not found or not an Array");
572 return false;
573 }
574
575 cJSON_ArrayForEach(item, leds)
576 {
577 if (c->led_count == WMR_MAX_LEDS) {
578 WMR_ERROR(log_level, "Too many ControllerLed entries. Enlarge WMR_MAX_LEDS");
579 return false;
580 }
581
582 struct wmr_led_config *led_config = c->leds + c->led_count;
583
584 if (!wmr_controller_led_config_parse(led_config, c->led_count, item, log_level)) {
585 WMR_WARN(log_level, "Error parsing ControllerLed entry");
586 continue;
587 }
588
589 c->led_count++;
590 }
591
592 cJSON_Delete(json_root);
593
594 return true;
595}
596
597/*!
598 * Precompute transforms to convert between OpenXR and WMR coordinate systems.
599 *
600 * OpenXR: X: Right, Y: Up, Z: Backward
601 * WMR: X: Right, Y: Down, Z: Forward
602 * ┌────────────────────┐
603 * │ OXR WMR │
604 * │ │
605 * │ ▲ y │
606 * │ │ ▲ z │
607 * │ │ x │ x │
608 * │ ├──────► ├──────► │
609 * │ │ │ │
610 * │ ▼ z │ │
611 * │ ▼ y │
612 * └────────────────────┘
613 */
void
wmr_config_precompute_transforms(struct wmr_inertial_sensors_config *sensors,
                                 struct wmr_distortion_eye_config *eye_params)
{
	// P_A_B is such that B = P_A_B * A. See conventions.md
	// The (x=1, w=0) quaternion is a 180° rotation around X, which flips
	// the Y and Z axes and thus maps between the WMR and OpenXR
	// conventions drawn in the comment above this function.
	struct xrt_pose P_oxr_wmr = {{.x = 1.0, .y = 0.0, .z = 0.0, .w = 0.0}, XRT_VEC3_ZERO};
	struct xrt_pose P_wmr_oxr = {0};
	struct xrt_pose P_acc_ht0 = sensors->accel.pose;
	struct xrt_pose P_gyr_ht0 = sensors->gyro.pose;
	struct xrt_pose P_ht0_acc = {0};
	struct xrt_pose P_ht0_gyr = {0};
	struct xrt_pose P_me_ht0 = {0}; // "me" == "middle of the eyes"
	struct xrt_pose P_me_acc = {0};
	struct xrt_pose P_me_gyr = {0};
	struct xrt_pose P_ht0_me = {0};
	struct xrt_pose P_acc_me = {0};
	struct xrt_pose P_oxr_ht0_me = {0}; // P_ht0_me in OpenXR coordinates
	struct xrt_pose P_oxr_acc_me = {0}; // P_acc_me in OpenXR coordinates

	// All of the observed headsets have reported a zero translation for its gyro
	assert(m_vec3_equal_exact(P_gyr_ht0.position, (struct xrt_vec3){0, 0, 0}));

	// Initialize transforms

	// All of these are in WMR coordinates.
	math_pose_invert(&P_oxr_wmr, &P_wmr_oxr); // P_wmr_oxr == P_oxr_wmr (a 180° rotation is its own inverse)
	math_pose_invert(&P_acc_ht0, &P_ht0_acc);
	math_pose_invert(&P_gyr_ht0, &P_ht0_gyr);
	// "me" is the midpoint of the two eye poses; with no display
	// calibration available it degrades to the identity pose.
	if (eye_params)
		math_pose_interpolate(&eye_params[0].pose, &eye_params[1].pose, 0.5, &P_me_ht0);
	else
		math_pose_identity(&P_me_ht0);
	math_pose_transform(&P_me_ht0, &P_ht0_acc, &P_me_acc);
	math_pose_transform(&P_me_ht0, &P_ht0_gyr, &P_me_gyr);
	math_pose_invert(&P_me_ht0, &P_ht0_me);
	math_pose_invert(&P_me_acc, &P_acc_me);

	// Express P_*_me pose in OpenXR coordinates through sandwich products:
	// P_oxr_X_me = P_oxr_wmr * P_X_me * P_wmr_oxr.
	math_pose_transform(&P_acc_me, &P_wmr_oxr, &P_oxr_acc_me);
	math_pose_transform(&P_oxr_wmr, &P_oxr_acc_me, &P_oxr_acc_me);
	math_pose_transform(&P_ht0_me, &P_wmr_oxr, &P_oxr_ht0_me);
	math_pose_transform(&P_oxr_wmr, &P_oxr_ht0_me, &P_oxr_ht0_me);

	// Save transforms
	math_pose_transform(&P_oxr_wmr, &P_me_acc, &sensors->transforms.P_oxr_acc);
	math_pose_transform(&P_oxr_wmr, &P_me_gyr, &sensors->transforms.P_oxr_gyr);
	sensors->transforms.P_ht0_me = P_oxr_ht0_me;
	sensors->transforms.P_imu_me = P_oxr_acc_me; // Assume accel pose is IMU pose
}