The open source OpenXR runtime
0 forks

Configure Feed

Select the types of activity you want to include in your feed.

d/wmr + a/util: Extract out WMR Poly3k distortion to shared place

Part-of: <https://gitlab.freedesktop.org/monado/monado/-/merge_requests/2590>

authored by

Beyley Cardellio and committed by
Marge Bot
b2ffa637 de6c4bff

+226 -213
+138
src/xrt/auxiliary/util/u_distortion_mesh.c
··· 401 401 402 402 /* 403 403 * 404 + * Windows Mixed Reality distortion 405 + * 406 + */ 407 + 408 + void 409 + u_compute_distortion_poly_3k( 410 + struct u_poly_3k_eye_values *values, uint32_t view, float u, float v, struct xrt_uv_triplet *result) 411 + { 412 + assert(view == 0 || view == 1); 413 + 414 + const struct xrt_matrix_3x3 *inv_affine_xform = &values->inv_affine_xform; 415 + 416 + // Results r/g/b. 417 + struct xrt_vec2 tc[3]; 418 + 419 + // Dear compiler, please vectorize. 420 + for (int channel = 0; channel < 3; channel++) { 421 + const struct xrt_vec2_i32 display_size = values->channels[channel].display_size; 422 + const struct xrt_vec2 eye_center = values->channels[channel].eye_center; 423 + const double *k = values->channels[channel].k; 424 + 425 + /* Scale the 0..1 input UV back to pixels relative to the distortion center, 426 + * accounting for the right eye starting at X = panel_width / 2.0 */ 427 + struct xrt_vec2 pix_coord = {(u + 1.0f * view) * (display_size.x / 2.0f) - eye_center.x, 428 + v * display_size.y - eye_center.y}; 429 + 430 + pix_coord.y += (float)values->y_offset; 431 + 432 + float r2 = m_vec2_dot(pix_coord, pix_coord); 433 + float k1 = (float)k[0]; 434 + float k2 = (float)k[1]; 435 + float k3 = (float)k[2]; 436 + 437 + float d = 1.0f + r2 * (k1 + r2 * (k2 + r2 * k3)); 438 + 439 + /* Map the distorted pixel coordinate back to normalised view plane coords using the inverse affine 440 + * xform */ 441 + struct xrt_vec3 p = {(pix_coord.x * d + eye_center.x), (pix_coord.y * d + eye_center.y), 1.0f}; 442 + struct xrt_vec3 vp; 443 + math_matrix_3x3_transform_vec3(inv_affine_xform, &p, &vp); 444 + 445 + /* Finally map back to the input texture 0..1 range based on the render FoV (from tex_N_range.x .. 
446 + * tex_N_range.y) */ 447 + tc[channel].x = 448 + ((vp.x / vp.z) - values->tex_x_range.x) / (values->tex_x_range.y - values->tex_x_range.x); 449 + tc[channel].y = 450 + ((vp.y / vp.z) - values->tex_y_range.x) / (values->tex_y_range.y - values->tex_y_range.x); 451 + } 452 + 453 + result->r = tc[0]; 454 + result->g = tc[1]; 455 + result->b = tc[2]; 456 + } 457 + 458 + void 459 + u_compute_distortion_bounds_poly_3k(const struct xrt_matrix_3x3 *inv_affine_xform, 460 + struct u_poly_3k_distortion_values *values, 461 + int view, 462 + struct xrt_fov *out_fov, 463 + struct xrt_vec2 *out_tex_x_range, 464 + struct xrt_vec2 *out_tex_y_range) 465 + { 466 + assert(view == 0 || view == 1); 467 + 468 + float tanangle_left = 0.0f; 469 + float tanangle_right = 0.0f; 470 + float tanangle_up = 0.0f; 471 + float tanangle_down = 0.0f; 472 + 473 + for (int channel = 0; channel < 3; channel++) { 474 + const struct xrt_vec2 eye_center = values[channel].eye_center; 475 + const double *k = values[channel].k; 476 + 477 + /* The X coords start at 0 for the left eye, and display_size.x / 2.0 for the right */ 478 + const struct xrt_vec2 pix_coords[4] = { 479 + /* -eye_center_x, 0 */ 480 + {(1.0f * view) * (values->display_size.x / 2.0f) - eye_center.x, 0.0f}, 481 + /* 0, -eye_center_y */ 482 + {0.0f, -eye_center.y}, 483 + /* width-eye_center_x, 0 */ 484 + {(1.0f + 1.0f * view) * (values->display_size.x / 2.0f) - eye_center.x, 0.0f}, 485 + /* 0, height-eye_center_y */ 486 + {0.0f, values->display_size.y - eye_center.y}, 487 + }; 488 + 489 + for (int c = 0; c < 4; c++) { 490 + const struct xrt_vec2 pix_coord = pix_coords[c]; 491 + 492 + float k1 = (float)k[0]; 493 + float k2 = (float)k[1]; 494 + float k3 = (float)k[2]; 495 + 496 + float r2 = m_vec2_dot(pix_coord, pix_coord); 497 + 498 + /* distort the pixel */ 499 + float d = 1.0f + r2 * (k1 + r2 * (k2 + r2 * k3)); 500 + 501 + /* Map the distorted pixel coordinate back to normalised view plane coords using the inverse 502 + * affine xform */ 
503 + struct xrt_vec3 p = {(pix_coord.x * d + eye_center.x), (pix_coord.y * d + eye_center.y), 1.0f}; 504 + struct xrt_vec3 vp; 505 + 506 + math_matrix_3x3_transform_vec3(inv_affine_xform, &p, &vp); 507 + vp.x /= vp.z; 508 + vp.y /= vp.z; 509 + 510 + if (pix_coord.x < 0.0f) { 511 + if (vp.x < tanangle_left) 512 + tanangle_left = vp.x; 513 + } else { 514 + if (vp.x > tanangle_right) 515 + tanangle_right = vp.x; 516 + } 517 + 518 + if (pix_coord.y < 0.0f) { 519 + if (vp.y < tanangle_up) 520 + tanangle_up = vp.y; 521 + } else { 522 + if (vp.y > tanangle_down) 523 + tanangle_down = vp.y; 524 + } 525 + } 526 + } 527 + 528 + out_fov->angle_left = atanf(tanangle_left); 529 + out_fov->angle_right = atanf(tanangle_right); 530 + out_fov->angle_down = -atanf(tanangle_down); 531 + out_fov->angle_up = -atanf(tanangle_up); 532 + 533 + out_tex_x_range->x = tanf(out_fov->angle_left); 534 + out_tex_x_range->y = tanf(out_fov->angle_right); 535 + out_tex_y_range->x = tanf(out_fov->angle_down); 536 + out_tex_y_range->y = tanf(out_fov->angle_up); 537 + } 538 + 539 + 540 + /* 541 + * 404 542 * No distortion. 405 543 * 406 544 */
+51
src/xrt/auxiliary/util/u_distortion_mesh.h
··· 157 157 u_compute_distortion_ns_meshgrid( 158 158 struct u_ns_meshgrid_values *values, int view, float u, float v, struct xrt_uv_triplet *result); 159 159 160 + /* 161 + * 162 + * Windows Mixed Reality distortion 163 + * 164 + */ 165 + 166 + struct u_poly_3k_distortion_values 167 + { 168 + struct xrt_vec2_i32 display_size; 169 + 170 + /* X/Y center of the distortion (pixels) */ 171 + struct xrt_vec2 eye_center; 172 + 173 + /* k1,k2,k3 params for radial distortion as 174 + * per the radial distortion model in 175 + * https://docs.opencv.org/4.x/d9/d0c/group__calib3d.html */ 176 + double k[3]; 177 + }; 178 + 179 + struct u_poly_3k_eye_values 180 + { 181 + //! Inverse affine transform to move from (undistorted) pixels 182 + //! to image plane / normalised image coordinates 183 + struct xrt_matrix_3x3 inv_affine_xform; 184 + 185 + //! tan(angle) FoV min/max for X and Y in the input texture 186 + struct xrt_vec2 tex_x_range; 187 + struct xrt_vec2 tex_y_range; 188 + 189 + //! Hack values for WMR devices with weird distortions 190 + int32_t y_offset; 191 + 192 + struct u_poly_3k_distortion_values channels[3]; 193 + }; 194 + 195 + void 196 + u_compute_distortion_poly_3k( 197 + struct u_poly_3k_eye_values *values, uint32_t view, float u, float v, struct xrt_uv_triplet *result); 198 + 199 + /* 200 + * Compute the visible area bounds by calculating the X/Y limits of a 201 + * crosshair through the distortion center, and back-project to the render FoV, 202 + */ 203 + void 204 + u_compute_distortion_bounds_poly_3k(const struct xrt_matrix_3x3 *inv_affine_xform, 205 + struct u_poly_3k_distortion_values *values, 206 + int view, 207 + struct xrt_fov *out_fov, 208 + struct xrt_vec2 *out_tex_x_range, 209 + struct xrt_vec2 *out_tex_y_range); 210 + 160 211 161 212 /* 162 213 *
+13 -12
src/xrt/drivers/wmr/wmr_config.c
··· 90 90 return false; 91 91 } 92 92 93 + struct xrt_matrix_3x3 affine_xform; 94 + 93 95 /* Extract display panel parameters */ 94 96 cJSON *affine = cJSON_GetObjectItem(display, "Affine"); 95 - if (affine == NULL || u_json_get_float_array(affine, eye->affine_xform.v, 9) != 9) { 97 + if (affine == NULL || u_json_get_float_array(affine, affine_xform.v, 9) != 9) { 96 98 WMR_ERROR(log_level, "Missing affine transform for AssignedEye \"%s\"", json_eye_name); 97 99 return false; 98 100 } 101 + 102 + math_matrix_3x3_inverse(&affine_xform, &eye->poly_3k.inv_affine_xform); 99 103 100 104 if (!JSON_FLOAT(display, "DisplayWidth", &eye->display_size.x) || 101 105 !JSON_FLOAT(display, "DisplayHeight", &eye->display_size.y)) ··· 133 137 const char *channel_names[] = {"DistortionRed", "DistortionGreen", "DistortionBlue"}; 134 138 135 139 for (int channel = 0; channel < 3; ++channel) { 136 - struct wmr_distortion_3K *distortion3K = &eye->distortion3K[channel]; 140 + struct u_poly_3k_eye_values *distortion3K = &eye->poly_3k; 137 141 138 142 cJSON *dist = cJSON_GetObjectItemCaseSensitive(display, channel_names[channel]); 139 143 if (!dist) { ··· 147 151 return false; 148 152 } 149 153 150 - if (!strcmp(model_type, "CALIBRATION_DisplayDistortionModelPolynomial3K")) { 151 - distortion3K->model = WMR_DISTORTION_MODEL_POLYNOMIAL_3K; 152 - } else { 153 - distortion3K->model = WMR_DISTORTION_MODEL_UNKNOWN; 154 - WMR_ERROR(log_level, "Unknown distortion model %s", model_type); 154 + if (strcmp(model_type, "CALIBRATION_DisplayDistortionModelPolynomial3K") != 0) { 155 + WMR_ERROR(log_level, "Unknown display distortion model %s", model_type); 155 156 return false; 156 157 } 157 158 ··· 170 171 return false; 171 172 } 172 173 173 - distortion3K->eye_center.x = parameters[0]; 174 - distortion3K->eye_center.y = parameters[1]; 174 + distortion3K->channels[channel].eye_center.x = parameters[0]; 175 + distortion3K->channels[channel].eye_center.y = parameters[1]; 175 176 176 - distortion3K->k[0] = 
parameters[2]; 177 - distortion3K->k[1] = parameters[3]; 178 - distortion3K->k[2] = parameters[4]; 177 + distortion3K->channels[channel].k[0] = parameters[2]; 178 + distortion3K->channels[channel].k[1] = parameters[3]; 179 + distortion3K->channels[channel].k[2] = parameters[4]; 179 180 } 180 181 181 182 return true;
+3 -17
src/xrt/drivers/wmr/wmr_config.h
··· 12 12 13 13 #include "math/m_vec2.h" 14 14 #include "math/m_vec3.h" 15 + 15 16 #include "util/u_logging.h" 17 + #include "util/u_distortion_mesh.h" 16 18 17 19 /* Increase this number if anyone releases a headset with 18 20 * more cameras */ ··· 50 52 WMR_CAMERA_PURPOSE_DISPLAY_OBSERVER, 51 53 }; 52 54 53 - struct wmr_distortion_3K 54 - { 55 - enum wmr_distortion_model model; 56 - 57 - /* X/Y center of the distortion (pixels) */ 58 - struct xrt_vec2 eye_center; 59 - /* k1,k2,k3 params for radial distortion as 60 - * per the radial distortion model in 61 - * https://docs.opencv.org/4.x/d9/d0c/group__calib3d.html */ 62 - double k[3]; 63 - }; 64 - 65 55 struct wmr_distortion_6KT 66 56 { 67 57 enum wmr_distortion_model model; ··· 83 73 84 74 struct wmr_distortion_eye_config 85 75 { 86 - /* 3x3 camera matrix that moves from normalised camera coords (X/Z & Y/Z) to undistorted pixels */ 87 - struct xrt_matrix_3x3 affine_xform; 88 - 89 76 struct xrt_vec3 translation; //!< Raw translation (to HT0) 90 77 struct xrt_matrix_3x3 rotation; //!< Raw rotation (to HT0), row major 91 78 struct xrt_pose pose; //!< Pose from `translation` and `rotation` ··· 98 85 /* Center for the eye viewport visibility (pixels) */ 99 86 struct xrt_vec2 visible_center; 100 87 101 - /* RGB distortion params */ 102 - struct wmr_distortion_3K distortion3K[3]; 88 + struct u_poly_3k_eye_values poly_3k; //!< Distortion parameters for each channel 103 89 }; 104 90 105 91 struct wmr_camera_config
+21 -168
src/xrt/drivers/wmr/wmr_hmd.c
··· 1247 1247 u_device_free(&wh->base); 1248 1248 } 1249 1249 1250 - static xrt_result_t 1251 - compute_distortion_wmr(struct xrt_device *xdev, uint32_t view, float u, float v, struct xrt_uv_triplet *result) 1252 - { 1253 - DRV_TRACE_MARKER(); 1254 - 1255 - struct wmr_hmd *wh = wmr_hmd(xdev); 1256 - 1257 - assert(view == 0 || view == 1); 1258 - 1259 - const struct wmr_distortion_eye_config *ec = wh->config.eye_params + view; 1260 - struct wmr_hmd_distortion_params *distortion_params = wh->distortion_params + view; 1261 - 1262 - // Results r/g/b. 1263 - struct xrt_vec2 tc[3]; 1264 - 1265 - // Dear compiler, please vectorize. 1266 - for (int i = 0; i < 3; i++) { 1267 - const struct wmr_distortion_3K *distortion3K = ec->distortion3K + i; 1268 - 1269 - /* Scale the 0..1 input UV back to pixels relative to the distortion center, 1270 - * accounting for the right eye starting at X = panel_width / 2.0 */ 1271 - struct xrt_vec2 pix_coord = {(u + 1.0f * view) * (ec->display_size.x / 2.0f) - 1272 - distortion3K->eye_center.x, 1273 - v * ec->display_size.y - distortion3K->eye_center.y}; 1274 - 1275 - if (view == 0) { 1276 - pix_coord.y += (float)wh->left_view_y_offset; 1277 - } else if (view == 1) { 1278 - pix_coord.y += (float)wh->right_view_y_offset; 1279 - } 1280 - 1281 - float r2 = m_vec2_dot(pix_coord, pix_coord); 1282 - float k1 = (float)distortion3K->k[0]; 1283 - float k2 = (float)distortion3K->k[1]; 1284 - float k3 = (float)distortion3K->k[2]; 1285 - 1286 - float d = 1.0f + r2 * (k1 + r2 * (k2 + r2 * k3)); 1287 - 1288 - /* Map the distorted pixel coordinate back to normalised view plane coords using the inverse affine 1289 - * xform */ 1290 - struct xrt_vec3 p = {(pix_coord.x * d + distortion3K->eye_center.x), 1291 - (pix_coord.y * d + distortion3K->eye_center.y), 1.0f}; 1292 - struct xrt_vec3 vp; 1293 - math_matrix_3x3_transform_vec3(&distortion_params->inv_affine_xform, &p, &vp); 1294 - 1295 - /* Finally map back to the input texture 0..1 range based on the render 
FoV (from tex_N_range.x .. 1296 - * tex_N_range.y) */ 1297 - tc[i].x = ((vp.x / vp.z) - distortion_params->tex_x_range.x) / 1298 - (distortion_params->tex_x_range.y - distortion_params->tex_x_range.x); 1299 - tc[i].y = ((vp.y / vp.z) - distortion_params->tex_y_range.x) / 1300 - (distortion_params->tex_y_range.y - distortion_params->tex_y_range.x); 1301 - } 1302 - 1303 - result->r = tc[0]; 1304 - result->g = tc[1]; 1305 - result->b = tc[2]; 1306 - 1307 - return XRT_SUCCESS; 1308 - } 1309 - 1310 - /* 1311 - * Compute the visible area bounds by calculating the X/Y limits of a 1312 - * crosshair through the distortion center, and back-project to the render FoV, 1313 - */ 1314 - static void 1315 - compute_distortion_bounds(struct wmr_hmd *wh, 1316 - int view, 1317 - float *out_angle_left, 1318 - float *out_angle_right, 1319 - float *out_angle_down, 1320 - float *out_angle_up) 1321 - { 1322 - DRV_TRACE_MARKER(); 1323 - 1324 - assert(view == 0 || view == 1); 1325 - 1326 - float tanangle_left = 0.0f; 1327 - float tanangle_right = 0.0f; 1328 - float tanangle_up = 0.0f; 1329 - float tanangle_down = 0.0f; 1330 - 1331 - const struct wmr_distortion_eye_config *ec = wh->config.eye_params + view; 1332 - struct wmr_hmd_distortion_params *distortion_params = wh->distortion_params + view; 1333 - 1334 - for (int i = 0; i < 3; i++) { 1335 - const struct wmr_distortion_3K *distortion3K = ec->distortion3K + i; 1336 - 1337 - /* The X coords start at 0 for the left eye, and display_size.x / 2.0 for the right */ 1338 - const struct xrt_vec2 pix_coords[4] = { 1339 - /* -eye_center_x, 0 */ 1340 - {(1.0f * view) * (ec->display_size.x / 2.0f) - distortion3K->eye_center.x, 0.0f}, 1341 - /* 0, -eye_center_y */ 1342 - {0.0f, -distortion3K->eye_center.y}, 1343 - /* width-eye_center_x, 0 */ 1344 - {(1.0f + 1.0f * view) * (ec->display_size.x / 2.0f) - distortion3K->eye_center.x, 0.0f}, 1345 - /* 0, height-eye_center_y */ 1346 - {0.0f, ec->display_size.y - distortion3K->eye_center.y}, 1347 - }; 1348 
- 1349 - for (int c = 0; c < 4; c++) { 1350 - const struct xrt_vec2 pix_coord = pix_coords[c]; 1351 - 1352 - float k1 = distortion3K->k[0]; 1353 - float k2 = distortion3K->k[1]; 1354 - float k3 = distortion3K->k[2]; 1355 - 1356 - float r2 = m_vec2_dot(pix_coord, pix_coord); 1357 - 1358 - /* distort the pixel */ 1359 - float d = 1.0f + r2 * (k1 + r2 * (k2 + r2 * k3)); 1360 - 1361 - /* Map the distorted pixel coordinate back to normalised view plane coords using the inverse 1362 - * affine xform */ 1363 - struct xrt_vec3 p = {(pix_coord.x * d + distortion3K->eye_center.x), 1364 - (pix_coord.y * d + distortion3K->eye_center.y), 1.0f}; 1365 - struct xrt_vec3 vp; 1366 - 1367 - math_matrix_3x3_transform_vec3(&distortion_params->inv_affine_xform, &p, &vp); 1368 - vp.x /= vp.z; 1369 - vp.y /= vp.z; 1370 - 1371 - if (pix_coord.x < 0.0f) { 1372 - if (vp.x < tanangle_left) 1373 - tanangle_left = vp.x; 1374 - } else { 1375 - if (vp.x > tanangle_right) 1376 - tanangle_right = vp.x; 1377 - } 1378 - 1379 - if (pix_coord.y < 0.0f) { 1380 - if (vp.y < tanangle_up) 1381 - tanangle_up = vp.y; 1382 - } else { 1383 - if (vp.y > tanangle_down) 1384 - tanangle_down = vp.y; 1385 - } 1386 - 1387 - WMR_DEBUG(wh, "channel %d delta coord %f, %f d pixel %f %f, %f -> %f, %f", i, pix_coord.x, 1388 - pix_coord.y, d, p.x, p.y, vp.x, vp.y); 1389 - } 1390 - } 1391 - 1392 - *out_angle_left = atanf(tanangle_left); 1393 - *out_angle_right = atanf(tanangle_right); 1394 - *out_angle_down = -atanf(tanangle_down); 1395 - *out_angle_up = -atanf(tanangle_up); 1396 - } 1397 - 1398 1250 XRT_MAYBE_UNUSED static struct t_camera_calibration 1399 1251 wmr_hmd_get_cam_calib(struct wmr_hmd *wh, int cam_index) 1400 1252 { ··· 1928 1780 return wmr_hmd_send_controller_packet(wh, cmd, sizeof(cmd)); 1929 1781 } 1930 1782 1783 + static xrt_result_t 1784 + compute_distortion_wmr(struct xrt_device *xdev, uint32_t view, float u, float v, struct xrt_uv_triplet *out_result) 1785 + { 1786 + struct wmr_hmd *wh = wmr_hmd(xdev); 
1787 + 1788 + u_compute_distortion_poly_3k(&wh->config.eye_params[view].poly_3k, view, u, v, out_result); 1789 + 1790 + return XRT_SUCCESS; 1791 + } 1792 + 1931 1793 void 1932 1794 wmr_hmd_create(enum wmr_headset_type hmd_type, 1933 1795 struct os_hid_device *hid_holo, ··· 1960 1822 wh->base.name = XRT_DEVICE_GENERIC_HMD; 1961 1823 wh->base.device_type = XRT_DEVICE_TYPE_HMD; 1962 1824 wh->log_level = log_level; 1963 - 1964 - wh->left_view_y_offset = debug_get_num_option_left_view_y_offset(); 1965 - wh->right_view_y_offset = debug_get_num_option_right_view_y_offset(); 1966 1825 1967 1826 wh->hid_hololens_sensors_dev = hid_holo; 1968 1827 wh->hid_control_dev = hid_ctrl; ··· 2065 1924 wh->base.hmd->blend_modes[idx++] = XRT_BLEND_MODE_OPAQUE; 2066 1925 wh->base.hmd->blend_mode_count = idx; 2067 1926 1927 + wh->config.eye_params[0].poly_3k.y_offset = debug_get_num_option_left_view_y_offset(); 1928 + wh->config.eye_params[1].poly_3k.y_offset = debug_get_num_option_right_view_y_offset(); 1929 + 2068 1930 // Distortion information, fills in xdev->compute_distortion(). 
2069 1931 for (eye = 0; eye < 2; eye++) { 2070 - math_matrix_3x3_inverse(&wh->config.eye_params[eye].affine_xform, 2071 - &wh->distortion_params[eye].inv_affine_xform); 2072 - 2073 - compute_distortion_bounds(wh, eye, &wh->base.hmd->distortion.fov[eye].angle_left, 2074 - &wh->base.hmd->distortion.fov[eye].angle_right, 2075 - &wh->base.hmd->distortion.fov[eye].angle_down, 2076 - &wh->base.hmd->distortion.fov[eye].angle_up); 1932 + struct xrt_fov *fov = &wh->base.hmd->distortion.fov[eye]; 1933 + struct u_poly_3k_eye_values *poly_3k = &wh->config.eye_params[eye].poly_3k; 2077 1934 2078 - WMR_INFO(wh, "FoV eye %d angles left %f right %f down %f up %f", eye, 2079 - wh->base.hmd->distortion.fov[eye].angle_left, wh->base.hmd->distortion.fov[eye].angle_right, 2080 - wh->base.hmd->distortion.fov[eye].angle_down, wh->base.hmd->distortion.fov[eye].angle_up); 1935 + u_compute_distortion_bounds_poly_3k(&poly_3k->inv_affine_xform, poly_3k->channels, eye, fov, 1936 + &poly_3k->tex_x_range, &poly_3k->tex_y_range); 2081 1937 2082 - wh->distortion_params[eye].tex_x_range.x = tanf(wh->base.hmd->distortion.fov[eye].angle_left); 2083 - wh->distortion_params[eye].tex_x_range.y = tanf(wh->base.hmd->distortion.fov[eye].angle_right); 2084 - wh->distortion_params[eye].tex_y_range.x = tanf(wh->base.hmd->distortion.fov[eye].angle_down); 2085 - wh->distortion_params[eye].tex_y_range.y = tanf(wh->base.hmd->distortion.fov[eye].angle_up); 1938 + WMR_INFO(wh, "FoV eye %d angles left %f right %f down %f up %f", eye, fov->angle_left, fov->angle_right, 1939 + fov->angle_down, fov->angle_up); 2086 1940 2087 - WMR_INFO(wh, "Render texture range %f, %f to %f, %f", wh->distortion_params[eye].tex_x_range.x, 2088 - wh->distortion_params[eye].tex_y_range.x, wh->distortion_params[eye].tex_x_range.y, 2089 - wh->distortion_params[eye].tex_y_range.y); 1941 + WMR_INFO(wh, "Render texture range %f, %f to %f, %f", poly_3k->tex_x_range.x, poly_3k->tex_y_range.x, 1942 + poly_3k->tex_x_range.y, 
poly_3k->tex_y_range.y); 2090 1943 } 2091 1944 2092 1945 wh->base.hmd->distortion.models = XRT_DISTORTION_MODEL_COMPUTE;
-16
src/xrt/drivers/wmr/wmr_hmd.h
··· 54 54 void (*screen_enable_func)(struct wmr_hmd *wh, bool enable); 55 55 }; 56 56 57 - struct wmr_hmd_distortion_params 58 - { 59 - //! Inverse affine transform to move from (undistorted) pixels 60 - //! to image plane / normalised image coordinates 61 - struct xrt_matrix_3x3 inv_affine_xform; 62 - 63 - //! tan(angle) FoV min/max for X and Y in the input texture 64 - struct xrt_vec2 tex_x_range; 65 - struct xrt_vec2 tex_y_range; 66 - }; 67 - 68 57 /*! 69 58 * @implements xrt_device 70 59 */ ··· 84 73 struct os_thread_helper oth; 85 74 86 75 enum u_logging_level log_level; 87 - 88 - int32_t left_view_y_offset, right_view_y_offset; 89 76 90 77 /*! 91 78 * This is the Hololens Sensors device, this is where we get all of the ··· 113 100 uint16_t raw_ipd; 114 101 //! Latest proximity sensor value read from the device. 115 102 uint8_t proximity_sensor; 116 - 117 - //! Distortion related parameters 118 - struct wmr_hmd_distortion_params distortion_params[2]; 119 103 120 104 struct hololens_sensors_packet packet; 121 105