The open source OpenXR runtime
0
fork

Configure Feed

Select the types of activity you want to include in your feed.

c/compositor: support compensation for rolling scanout hmds

Currently only the compute renderer performs compensation
Currently only a top to bottom scanout direction is compensated
Adds information on scanout to xrt_hmd_parts

Signed-off-by: Carl Philipp Klemm <carl@uvos.xyz>
Part-of: <https://gitlab.freedesktop.org/monado/monado/-/merge_requests/2596>

authored by

Carl Philipp Klemm and committed by
Marge Bot
ef0f552c 35348e63

+595 -401
+88 -37
src/xrt/compositor/main/comp_renderer.c
··· 242 242 calc_pose_data(struct comp_renderer *r, 243 243 enum comp_target_fov_source fov_source, 244 244 struct xrt_fov out_fovs[XRT_MAX_VIEWS], 245 - struct xrt_pose out_world[XRT_MAX_VIEWS], 245 + struct xrt_pose out_world_scanout_begin[XRT_MAX_VIEWS], 246 + struct xrt_pose out_world_scanout_end[XRT_MAX_VIEWS], 246 247 struct xrt_pose out_eye[XRT_MAX_VIEWS], 247 248 uint32_t view_count) 248 249 { ··· 254 255 0.0f, 255 256 }; 256 257 257 - struct xrt_space_relation head_relation = XRT_SPACE_RELATION_ZERO; 258 + struct xrt_space_relation head_relation[2] = XRT_SPACE_RELATION_ZERO; 258 259 struct xrt_fov xdev_fovs[XRT_MAX_VIEWS] = XRT_STRUCT_INIT; 259 - struct xrt_pose xdev_poses[XRT_MAX_VIEWS] = XRT_STRUCT_INIT; 260 + struct xrt_pose xdev_poses[2][XRT_MAX_VIEWS] = XRT_STRUCT_INIT; 260 261 261 - xrt_result_t xret = xrt_device_get_view_poses( // 262 - r->c->xdev, // 263 - &default_eye_relation, // 264 - r->c->frame.rendering.predicted_display_time_ns, // at_timestamp_ns 265 - view_count, // 266 - &head_relation, // out_head_relation 267 - xdev_fovs, // out_fovs 268 - xdev_poses); // out_poses 262 + uint64_t scanout_time_ns = 0; 263 + if (r->c->xdev->hmd->screens[0].scanout_direction == XRT_SCANOUT_DIRECTION_TOP_TO_BOTTOM) { 264 + scanout_time_ns = r->c->xdev->hmd->screens[0].scanout_time_ns; 265 + } else if (r->c->xdev->hmd->screens[0].scanout_direction != XRT_SCANOUT_DIRECTION_NONE) { 266 + COMP_SPEW(r->c, "Unable to apply scanout compensation as only DIRECTION_TOP_TO_BOTTOM is supported"); 267 + } 268 + 269 + int64_t begin_timestamp_ns = r->c->frame.rendering.predicted_display_time_ns; 270 + int64_t end_timestamp_ns = begin_timestamp_ns + scanout_time_ns; 271 + 272 + // Pose at beginning of scanout 273 + xrt_result_t xret = xrt_device_get_view_poses( // 274 + r->c->xdev, // 275 + &default_eye_relation, // 276 + begin_timestamp_ns, // at_timestamp_ns 277 + view_count, // 278 + &head_relation[0], // out_head_relation 279 + xdev_fovs, // out_fovs 280 + 
xdev_poses[0]); 269 281 if (xret != XRT_SUCCESS) { 270 282 struct u_pp_sink_stack_only sink; 271 283 u_pp_delegate_t dg = u_pp_sink_stack_only_init(&sink); ··· 274 286 return; 275 287 } 276 288 289 + // Pose at end of scanout 290 + if (scanout_time_ns != 0) { 291 + xret = xrt_device_get_view_poses( // 292 + r->c->xdev, // 293 + &default_eye_relation, // 294 + end_timestamp_ns, // at_timestamp_ns 295 + view_count, // 296 + &head_relation[1], // out_head_relation 297 + xdev_fovs, // out_fovs 298 + xdev_poses[1]); // out_poses 299 + if (xret != XRT_SUCCESS) { 300 + struct u_pp_sink_stack_only sink; 301 + u_pp_delegate_t dg = u_pp_sink_stack_only_init(&sink); 302 + u_pp_xrt_result(dg, xret); 303 + U_LOG_E("xrt_device_get_view_poses failed: %s", sink.buffer); 304 + return; 305 + } 306 + } else { 307 + for (size_t i = 0; i < XRT_MAX_VIEWS; ++i) { 308 + xdev_poses[1][i] = xdev_poses[0][i]; 309 + } 310 + head_relation[1] = head_relation[0]; 311 + } 312 + 277 313 struct xrt_fov dist_fov[XRT_MAX_VIEWS] = XRT_STRUCT_INIT; 278 314 for (uint32_t i = 0; i < view_count; i++) { 279 315 dist_fov[i] = r->c->xdev->hmd->distortion.fov[i]; ··· 288 324 289 325 for (uint32_t i = 0; i < view_count; i++) { 290 326 const struct xrt_fov fov = use_xdev ? 
xdev_fovs[i] : dist_fov[i]; 291 - const struct xrt_pose eye_pose = xdev_poses[i]; 327 + const struct xrt_pose eye_pose_scanout_start = xdev_poses[0][i]; 328 + const struct xrt_pose eye_pose_scanout_end = xdev_poses[1][i]; 292 329 293 - struct xrt_space_relation result = {0}; 330 + struct xrt_space_relation result_scanout_start = {0}; 331 + struct xrt_space_relation result_scanout_end = {0}; 294 332 struct xrt_relation_chain xrc = {0}; 295 - m_relation_chain_push_pose_if_not_identity(&xrc, &eye_pose); 296 - m_relation_chain_push_relation(&xrc, &head_relation); 297 - m_relation_chain_resolve(&xrc, &result); 333 + 334 + m_relation_chain_push_pose_if_not_identity(&xrc, &eye_pose_scanout_start); 335 + m_relation_chain_push_relation(&xrc, &head_relation[0]); 336 + m_relation_chain_resolve(&xrc, &result_scanout_start); 337 + 338 + xrc = (struct xrt_relation_chain){0}; 339 + 340 + m_relation_chain_push_pose_if_not_identity(&xrc, &eye_pose_scanout_end); 341 + m_relation_chain_push_relation(&xrc, &head_relation[1]); 342 + m_relation_chain_resolve(&xrc, &result_scanout_end); 298 343 299 344 // Results to callers. 300 345 out_fovs[i] = fov; 301 - out_world[i] = result.pose; 302 - out_eye[i] = eye_pose; 346 + out_world_scanout_begin[i] = result_scanout_start.pose; 347 + out_world_scanout_end[i] = result_scanout_end.pose; 348 + out_eye[i] = eye_pose_scanout_start; 303 349 304 350 // For remote rendering targets. 305 351 r->c->base.frame_params.fovs[i] = fov; 306 - r->c->base.frame_params.poses[i] = result.pose; 352 + r->c->base.frame_params.poses[i] = result_scanout_start.pose; 307 353 } 308 354 } 309 355 ··· 866 912 867 913 // Device view information. 
868 914 struct xrt_fov fovs[XRT_MAX_VIEWS]; 869 - struct xrt_pose world_poses[XRT_MAX_VIEWS]; 915 + struct xrt_pose world_poses_scanout_begin[XRT_MAX_VIEWS]; 916 + struct xrt_pose world_poses_scanout_end[XRT_MAX_VIEWS]; 870 917 struct xrt_pose eye_poses[XRT_MAX_VIEWS]; 871 - calc_pose_data( // 872 - r, // 873 - fov_source, // 874 - fovs, // 875 - world_poses, // 876 - eye_poses, // 877 - render->r->view_count); // 918 + calc_pose_data( // 919 + r, // 920 + fov_source, // 921 + fovs, // 922 + world_poses_scanout_begin, // 923 + world_poses_scanout_end, // 924 + eye_poses, // 925 + render->r->view_count); // 878 926 879 927 // Does everything. 880 928 chl_frame_state_gfx_default_pipeline( // ··· 882 930 render, // 883 931 layers, // 884 932 layer_count, // 885 - world_poses, // 933 + world_poses_scanout_begin, // 886 934 eye_poses, // 887 935 fovs, // 888 936 rtr, // ··· 924 972 925 973 // Device view information. 926 974 struct xrt_fov fovs[XRT_MAX_VIEWS]; 927 - struct xrt_pose world_poses[XRT_MAX_VIEWS]; 975 + struct xrt_pose world_poses_scanout_begin[XRT_MAX_VIEWS]; 976 + struct xrt_pose world_poses_scanout_end[XRT_MAX_VIEWS]; 928 977 struct xrt_pose eye_poses[XRT_MAX_VIEWS]; 929 - calc_pose_data( // 930 - r, // 931 - fov_source, // 932 - fovs, // 933 - world_poses, // 934 - eye_poses, // 935 - render->r->view_count); // 978 + calc_pose_data( // 979 + r, // 980 + fov_source, // 981 + fovs, // 982 + world_poses_scanout_begin, // 983 + world_poses_scanout_end, // 984 + eye_poses, // 985 + render->r->view_count); // 936 986 937 987 // Target Vulkan resources.. 938 988 VkImage target_image = r->c->target->images[r->acquired_buffer].handle; ··· 948 998 render, // 949 999 layers, // 950 1000 layer_count, // 951 - world_poses, // 1001 + world_poses_scanout_begin, // 1002 + world_poses_scanout_end, // 952 1003 eye_poses, // 953 1004 fovs, // 954 1005 target_image, //
+190 -209
src/xrt/compositor/render/render_compute.c
··· 278 278 NULL); // pDescriptorCopies 279 279 } 280 280 281 + static void 282 + dispatch_project_pipeline(struct render_compute *render, 283 + VkSampler src_samplers[XRT_MAX_VIEWS], 284 + VkImageView src_image_views[XRT_MAX_VIEWS], 285 + const struct xrt_normalized_rect src_norm_rects[XRT_MAX_VIEWS], 286 + VkImage target_image, 287 + VkImageView target_image_view, 288 + const struct render_viewport_data views[XRT_MAX_VIEWS], 289 + VkPipeline pipeline) 290 + { 291 + struct vk_bundle *vk = vk_from_render(render); 292 + struct render_resources *r = render->r; 293 + 294 + 295 + /* 296 + * UBO 297 + */ 298 + 299 + struct render_compute_distortion_ubo_data *data = 300 + (struct render_compute_distortion_ubo_data *)r->compute.distortion.ubo.mapped; 301 + for (uint32_t i = 0; i < render->r->view_count; ++i) { 302 + data->views[i] = views[i]; 303 + data->post_transforms[i] = src_norm_rects[i]; 304 + } 305 + 306 + 307 + /* 308 + * Source, target and distortion images. 309 + */ 310 + 311 + VkImageSubresourceRange subresource_range = { 312 + .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT, 313 + .baseMipLevel = 0, 314 + .levelCount = VK_REMAINING_MIP_LEVELS, 315 + .baseArrayLayer = 0, 316 + .layerCount = VK_REMAINING_ARRAY_LAYERS, 317 + }; 318 + 319 + vk_cmd_image_barrier_gpu_locked( // 320 + vk, // 321 + r->cmd, // 322 + target_image, // 323 + 0, // 324 + VK_ACCESS_SHADER_WRITE_BIT, // 325 + VK_IMAGE_LAYOUT_UNDEFINED, // 326 + VK_IMAGE_LAYOUT_GENERAL, // 327 + subresource_range); // 328 + 329 + VkSampler sampler = r->samplers.clamp_to_edge; 330 + VkSampler distortion_samplers[3 * XRT_MAX_VIEWS]; 331 + for (uint32_t i = 0; i < render->r->view_count; ++i) { 332 + distortion_samplers[3 * i + 0] = sampler; 333 + distortion_samplers[3 * i + 1] = sampler; 334 + distortion_samplers[3 * i + 2] = sampler; 335 + } 336 + 337 + update_compute_shared_descriptor_set( // 338 + vk, // 339 + r->compute.src_binding, // 340 + src_samplers, // 341 + src_image_views, // 342 + 
r->compute.distortion_binding, // 343 + distortion_samplers, // 344 + r->distortion.image_views, // 345 + r->compute.target_binding, // 346 + target_image_view, // 347 + r->compute.ubo_binding, // 348 + r->compute.distortion.ubo.buffer, // 349 + VK_WHOLE_SIZE, // 350 + render->shared_descriptor_set, // 351 + render->r->view_count); // 352 + 353 + vk->vkCmdBindPipeline( // 354 + r->cmd, // 355 + VK_PIPELINE_BIND_POINT_COMPUTE, // pipelineBindPoint 356 + pipeline); // pipeline 357 + 358 + vk->vkCmdBindDescriptorSets( // 359 + r->cmd, // 360 + VK_PIPELINE_BIND_POINT_COMPUTE, // pipelineBindPoint 361 + r->compute.distortion.pipeline_layout, // layout 362 + 0, // firstSet 363 + 1, // descriptorSetCount 364 + &render->shared_descriptor_set, // pDescriptorSets 365 + 0, // dynamicOffsetCount 366 + NULL); // pDynamicOffsets 367 + 368 + 369 + uint32_t w = 0, h = 0; 370 + calc_dispatch_dims_views(views, render->r->view_count, &w, &h); 371 + assert(w != 0 && h != 0); 372 + 373 + vk->vkCmdDispatch( // 374 + r->cmd, // 375 + w, // groupCountX 376 + h, // groupCountY 377 + 2); // groupCountZ 378 + 379 + VkImageMemoryBarrier memoryBarrier = { 380 + .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, 381 + .srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT, 382 + .dstAccessMask = VK_ACCESS_MEMORY_READ_BIT, 383 + .oldLayout = VK_IMAGE_LAYOUT_GENERAL, 384 + .newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, 385 + .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED, 386 + .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED, 387 + .image = target_image, 388 + .subresourceRange = subresource_range, 389 + }; 390 + 391 + vk->vkCmdPipelineBarrier( // 392 + r->cmd, // 393 + VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, // 394 + VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // 395 + 0, // 396 + 0, // 397 + NULL, // 398 + 0, // 399 + NULL, // 400 + 1, // 401 + &memoryBarrier); // 402 + } 403 + 281 404 282 405 /* 283 406 * ··· 457 580 const struct xrt_normalized_rect src_norm_rects[XRT_MAX_VIEWS], 458 581 const struct xrt_pose 
src_poses[XRT_MAX_VIEWS], 459 582 const struct xrt_fov src_fovs[XRT_MAX_VIEWS], 460 - const struct xrt_pose new_poses[XRT_MAX_VIEWS], 583 + const struct xrt_pose new_poses_scanout_begin[XRT_MAX_VIEWS], 584 + const struct xrt_pose new_poses_scanout_end[XRT_MAX_VIEWS], 461 585 VkImage target_image, 462 586 VkImageView target_image_view, 463 587 const struct render_viewport_data views[XRT_MAX_VIEWS]) 464 588 { 465 589 assert(render->r != NULL); 466 - 467 - struct vk_bundle *vk = vk_from_render(render); 468 590 struct render_resources *r = render->r; 469 591 470 592 ··· 472 594 * UBO 473 595 */ 474 596 475 - struct xrt_matrix_4x4 time_warp_matrix[XRT_MAX_VIEWS]; 597 + struct xrt_matrix_4x4 time_warp_matrix_scanout_begin[XRT_MAX_VIEWS]; 598 + struct xrt_matrix_4x4 time_warp_matrix_scanout_end[XRT_MAX_VIEWS]; 476 599 for (uint32_t i = 0; i < render->r->view_count; ++i) { 477 - render_calc_time_warp_matrix( // 478 - &src_poses[i], // 479 - &src_fovs[i], // 480 - &new_poses[i], // 481 - &time_warp_matrix[i]); // 600 + render_calc_time_warp_matrix( // 601 + &src_poses[i], // 602 + &src_fovs[i], // 603 + &new_poses_scanout_begin[i], // 604 + &time_warp_matrix_scanout_begin[i]); // 605 + 606 + render_calc_time_warp_matrix( // 607 + &src_poses[i], // 608 + &src_fovs[i], // 609 + &new_poses_scanout_end[i], // 610 + &time_warp_matrix_scanout_end[i]); // 482 611 } 483 612 484 613 struct render_compute_distortion_ubo_data *data = ··· 486 615 for (uint32_t i = 0; i < render->r->view_count; ++i) { 487 616 data->views[i] = views[i]; 488 617 data->pre_transforms[i] = r->distortion.uv_to_tanangle[i]; 489 - data->transforms[i] = time_warp_matrix[i]; 618 + data->transform_timewarp_scanout_begin[i] = time_warp_matrix_scanout_begin[i]; 619 + data->transform_timewarp_scanout_end[i] = time_warp_matrix_scanout_end[i]; 490 620 data->post_transforms[i] = src_norm_rects[i]; 491 621 } 492 622 493 - /* 494 - * Source, target and distortion images. 
495 - */ 623 + dispatch_project_pipeline(render, src_samplers, src_image_views, src_norm_rects, target_image, 624 + target_image_view, views, r->compute.distortion.timewarp_pipeline); 625 + } 496 626 497 - VkImageSubresourceRange subresource_range = { 498 - .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT, 499 - .baseMipLevel = 0, 500 - .levelCount = VK_REMAINING_MIP_LEVELS, 501 - .baseArrayLayer = 0, 502 - .layerCount = VK_REMAINING_ARRAY_LAYERS, 503 - }; 504 - 505 - vk_cmd_image_barrier_gpu_locked( // 506 - vk, // 507 - r->cmd, // 508 - target_image, // 509 - 0, // 510 - VK_ACCESS_SHADER_WRITE_BIT, // 511 - VK_IMAGE_LAYOUT_UNDEFINED, // 512 - VK_IMAGE_LAYOUT_GENERAL, // 513 - subresource_range); // 514 - 515 - VkSampler sampler = r->samplers.clamp_to_edge; 516 - VkSampler distortion_samplers[3 * XRT_MAX_VIEWS]; 517 - for (uint32_t i = 0; i < render->r->view_count; ++i) { 518 - distortion_samplers[3 * i + 0] = sampler; 519 - distortion_samplers[3 * i + 1] = sampler; 520 - distortion_samplers[3 * i + 2] = sampler; 521 - } 522 627 523 - update_compute_shared_descriptor_set( // 524 - vk, // 525 - r->compute.src_binding, // 526 - src_samplers, // 527 - src_image_views, // 528 - r->compute.distortion_binding, // 529 - distortion_samplers, // 530 - r->distortion.image_views, // 531 - r->compute.target_binding, // 532 - target_image_view, // 533 - r->compute.ubo_binding, // 534 - r->compute.distortion.ubo.buffer, // 535 - VK_WHOLE_SIZE, // 536 - render->shared_descriptor_set, // 537 - render->r->view_count); // 538 - 539 - vk->vkCmdBindPipeline( // 540 - r->cmd, // 541 - VK_PIPELINE_BIND_POINT_COMPUTE, // pipelineBindPoint 542 - r->compute.distortion.timewarp_pipeline); // pipeline 543 - 544 - vk->vkCmdBindDescriptorSets( // 545 - r->cmd, // 546 - VK_PIPELINE_BIND_POINT_COMPUTE, // pipelineBindPoint 547 - r->compute.distortion.pipeline_layout, // layout 548 - 0, // firstSet 549 - 1, // descriptorSetCount 550 - &render->shared_descriptor_set, // pDescriptorSets 551 - 0, // 
dynamicOffsetCount 552 - NULL); // pDynamicOffsets 553 - 554 - 555 - uint32_t w = 0, h = 0; 556 - calc_dispatch_dims_views(views, render->r->view_count, &w, &h); 557 - assert(w != 0 && h != 0); 558 - 559 - vk->vkCmdDispatch( // 560 - r->cmd, // 561 - w, // groupCountX 562 - h, // groupCountY 563 - 2); // groupCountZ 564 - 565 - VkImageMemoryBarrier memoryBarrier = { 566 - .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, 567 - .srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT, 568 - .dstAccessMask = VK_ACCESS_MEMORY_READ_BIT, 569 - .oldLayout = VK_IMAGE_LAYOUT_GENERAL, 570 - .newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, 571 - .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED, 572 - .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED, 573 - .image = target_image, 574 - .subresourceRange = subresource_range, 575 - }; 576 - 577 - vk->vkCmdPipelineBarrier( // 578 - r->cmd, // 579 - VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, // 580 - VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // 581 - 0, // 582 - 0, // 583 - NULL, // 584 - 0, // 585 - NULL, // 586 - 1, // 587 - &memoryBarrier); // 588 - } 589 - 628 + /* 629 + * This function is intended to be used on content already timewarped to new_poses_scanout_begin. 630 + * It performs only the timewarp nesscary to compensate for the time delta between the start and end of 631 + * scanout. 
632 + */ 590 633 void 591 - render_compute_projection(struct render_compute *render, 592 - VkSampler src_samplers[XRT_MAX_VIEWS], 593 - VkImageView src_image_views[XRT_MAX_VIEWS], 594 - const struct xrt_normalized_rect src_norm_rects[XRT_MAX_VIEWS], 595 - VkImage target_image, 596 - VkImageView target_image_view, 597 - const struct render_viewport_data views[XRT_MAX_VIEWS]) 634 + render_compute_projection_scanout_compensation(struct render_compute *render, 635 + VkSampler src_samplers[XRT_MAX_VIEWS], 636 + VkImageView src_image_views[XRT_MAX_VIEWS], 637 + const struct xrt_normalized_rect src_rects[XRT_MAX_VIEWS], 638 + const struct xrt_fov src_fovs[XRT_MAX_VIEWS], 639 + const struct xrt_pose new_poses_scanout_begin[XRT_MAX_VIEWS], 640 + const struct xrt_pose new_poses_scanout_end[XRT_MAX_VIEWS], 641 + VkImage target_image, 642 + VkImageView target_image_view, 643 + const struct render_viewport_data views[XRT_MAX_VIEWS]) 598 644 { 599 645 assert(render->r != NULL); 600 - 601 - struct vk_bundle *vk = vk_from_render(render); 602 646 struct render_resources *r = render->r; 603 647 604 648 ··· 606 650 * UBO 607 651 */ 608 652 653 + struct xrt_matrix_4x4 time_warp_matrix_scanout_begin[XRT_MAX_VIEWS]; 654 + struct xrt_matrix_4x4 time_warp_matrix_scanout_end[XRT_MAX_VIEWS]; 655 + for (uint32_t i = 0; i < render->r->view_count; ++i) { 656 + render_calc_time_warp_projection(&src_fovs[i], &time_warp_matrix_scanout_begin[i]); 657 + 658 + render_calc_time_warp_matrix( // 659 + &new_poses_scanout_begin[i], // 660 + &src_fovs[i], // 661 + &new_poses_scanout_end[i], // 662 + &time_warp_matrix_scanout_end[i]); // 663 + } 664 + 609 665 struct render_compute_distortion_ubo_data *data = 610 666 (struct render_compute_distortion_ubo_data *)r->compute.distortion.ubo.mapped; 611 667 for (uint32_t i = 0; i < render->r->view_count; ++i) { 612 668 data->views[i] = views[i]; 613 - data->post_transforms[i] = src_norm_rects[i]; 669 + data->pre_transforms[i] = r->distortion.uv_to_tanangle[i]; 
670 + data->transform_timewarp_scanout_begin[i] = time_warp_matrix_scanout_begin[i]; 671 + data->transform_timewarp_scanout_end[i] = time_warp_matrix_scanout_end[i]; 672 + data->post_transforms[i] = src_rects[i]; 614 673 } 615 674 675 + dispatch_project_pipeline(render, src_samplers, src_image_views, src_rects, target_image, target_image_view, 676 + views, r->compute.distortion.timewarp_pipeline); 677 + } 616 678 617 - /* 618 - * Source, target and distortion images. 619 - */ 620 - 621 - VkImageSubresourceRange subresource_range = { 622 - .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT, 623 - .baseMipLevel = 0, 624 - .levelCount = VK_REMAINING_MIP_LEVELS, 625 - .baseArrayLayer = 0, 626 - .layerCount = VK_REMAINING_ARRAY_LAYERS, 627 - }; 628 - 629 - vk_cmd_image_barrier_gpu_locked( // 630 - vk, // 631 - r->cmd, // 632 - target_image, // 633 - 0, // 634 - VK_ACCESS_SHADER_WRITE_BIT, // 635 - VK_IMAGE_LAYOUT_UNDEFINED, // 636 - VK_IMAGE_LAYOUT_GENERAL, // 637 - subresource_range); // 638 - 639 - VkSampler sampler = r->samplers.clamp_to_edge; 640 - VkSampler distortion_samplers[3 * XRT_MAX_VIEWS]; 641 - for (uint32_t i = 0; i < render->r->view_count; ++i) { 642 - distortion_samplers[3 * i + 0] = sampler; 643 - distortion_samplers[3 * i + 1] = sampler; 644 - distortion_samplers[3 * i + 2] = sampler; 645 - } 679 + void 680 + render_compute_projection_no_timewarp(struct render_compute *render, 681 + VkSampler src_samplers[XRT_MAX_VIEWS], 682 + VkImageView src_image_views[XRT_MAX_VIEWS], 683 + const struct xrt_normalized_rect src_rects[XRT_MAX_VIEWS], 684 + VkImage target_image, 685 + VkImageView target_image_view, 686 + const struct render_viewport_data views[XRT_MAX_VIEWS]) 687 + { 688 + assert(render->r != NULL); 689 + struct render_resources *r = render->r; 646 690 647 - update_compute_shared_descriptor_set( // 648 - vk, // 649 - r->compute.src_binding, // 650 - src_samplers, // 651 - src_image_views, // 652 - r->compute.distortion_binding, // 653 - distortion_samplers, // 654 
- r->distortion.image_views, // 655 - r->compute.target_binding, // 656 - target_image_view, // 657 - r->compute.ubo_binding, // 658 - r->compute.distortion.ubo.buffer, // 659 - VK_WHOLE_SIZE, // 660 - render->shared_descriptor_set, // 661 - render->r->view_count); // 662 - 663 - vk->vkCmdBindPipeline( // 664 - r->cmd, // 665 - VK_PIPELINE_BIND_POINT_COMPUTE, // pipelineBindPoint 666 - r->compute.distortion.pipeline); // pipeline 667 - 668 - vk->vkCmdBindDescriptorSets( // 669 - r->cmd, // 670 - VK_PIPELINE_BIND_POINT_COMPUTE, // pipelineBindPoint 671 - r->compute.distortion.pipeline_layout, // layout 672 - 0, // firstSet 673 - 1, // descriptorSetCount 674 - &render->shared_descriptor_set, // pDescriptorSets 675 - 0, // dynamicOffsetCount 676 - NULL); // pDynamicOffsets 677 - 678 - 679 - uint32_t w = 0, h = 0; 680 - calc_dispatch_dims_views(views, render->r->view_count, &w, &h); 681 - assert(w != 0 && h != 0); 682 - 683 - vk->vkCmdDispatch( // 684 - r->cmd, // 685 - w, // groupCountX 686 - h, // groupCountY 687 - 2); // groupCountZ 688 - 689 - VkImageMemoryBarrier memoryBarrier = { 690 - .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, 691 - .srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT, 692 - .dstAccessMask = VK_ACCESS_MEMORY_READ_BIT, 693 - .oldLayout = VK_IMAGE_LAYOUT_GENERAL, 694 - .newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, 695 - .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED, 696 - .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED, 697 - .image = target_image, 698 - .subresourceRange = subresource_range, 699 - }; 700 - 701 - vk->vkCmdPipelineBarrier( // 702 - r->cmd, // 703 - VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, // 704 - VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // 705 - 0, // 706 - 0, // 707 - NULL, // 708 - 0, // 709 - NULL, // 710 - 1, // 711 - &memoryBarrier); // 691 + dispatch_project_pipeline(render, src_samplers, src_image_views, src_rects, target_image, target_image_view, 692 + views, r->compute.distortion.pipeline); 712 693 } 713 694 714 695 void
+33 -11
src/xrt/compositor/render/render_interface.h
··· 95 95 */ 96 96 97 97 /*! 98 + * Create a simplified projection matrix for timewarp. 99 + */ 100 + void 101 + render_calc_time_warp_projection(const struct xrt_fov *fov, struct xrt_matrix_4x4 *result); 102 + 103 + /*! 98 104 * Calculates a timewarp matrix which takes in NDC coords and gives out results 99 105 * in [-1, 1] space that needs a perspective divide. 100 106 */ ··· 1256 1262 */ 1257 1263 1258 1264 //! Timewarp matrices 1259 - struct xrt_matrix_4x4 transforms[RENDER_MAX_LAYERS]; 1260 - 1265 + struct xrt_matrix_4x4 transforms_timewarp[RENDER_MAX_LAYERS]; 1261 1266 1262 1267 /*! 1263 1268 * For quad layers ··· 1294 1299 struct render_viewport_data views[XRT_MAX_VIEWS]; 1295 1300 struct xrt_normalized_rect pre_transforms[XRT_MAX_VIEWS]; 1296 1301 struct xrt_normalized_rect post_transforms[XRT_MAX_VIEWS]; 1297 - struct xrt_matrix_4x4 transforms[XRT_MAX_VIEWS]; 1302 + struct xrt_matrix_4x4 transform_timewarp_scanout_begin[XRT_MAX_VIEWS]; 1303 + struct xrt_matrix_4x4 transform_timewarp_scanout_end[XRT_MAX_VIEWS]; 1298 1304 }; 1299 1305 1300 1306 /*! 
··· 1364 1370 const struct xrt_normalized_rect src_rects[XRT_MAX_VIEWS], 1365 1371 const struct xrt_pose src_poses[XRT_MAX_VIEWS], 1366 1372 const struct xrt_fov src_fovs[XRT_MAX_VIEWS], 1367 - const struct xrt_pose new_poses[XRT_MAX_VIEWS], 1373 + const struct xrt_pose new_poses_scanout_begin[XRT_MAX_VIEWS], 1374 + const struct xrt_pose new_poses_scanout_end[XRT_MAX_VIEWS], 1368 1375 VkImage target_image, 1369 1376 VkImageView target_image_view, 1370 1377 const struct render_viewport_data views[XRT_MAX_VIEWS]); ··· 1373 1380 * @public @memberof render_compute 1374 1381 */ 1375 1382 void 1376 - render_compute_projection(struct render_compute *render, 1377 - VkSampler src_samplers[XRT_MAX_VIEWS], 1378 - VkImageView src_image_views[XRT_MAX_VIEWS], 1379 - const struct xrt_normalized_rect src_rects[XRT_MAX_VIEWS], 1380 - VkImage target_image, 1381 - VkImageView target_image_view, 1382 - const struct render_viewport_data views[XRT_MAX_VIEWS]); 1383 + render_compute_projection_scanout_compensation(struct render_compute *render, 1384 + VkSampler src_samplers[XRT_MAX_VIEWS], 1385 + VkImageView src_image_views[XRT_MAX_VIEWS], 1386 + const struct xrt_normalized_rect src_rects[XRT_MAX_VIEWS], 1387 + const struct xrt_fov src_fovs[XRT_MAX_VIEWS], 1388 + const struct xrt_pose new_poses_scanout_begin[XRT_MAX_VIEWS], 1389 + const struct xrt_pose new_poses_scanout_end[XRT_MAX_VIEWS], 1390 + VkImage target_image, 1391 + VkImageView target_image_view, 1392 + const struct render_viewport_data views[XRT_MAX_VIEWS]); 1393 + 1394 + /*! 1395 + * @public @memberof render_compute 1396 + */ 1397 + void 1398 + render_compute_projection_no_timewarp(struct render_compute *render, 1399 + VkSampler src_samplers[XRT_MAX_VIEWS], 1400 + VkImageView src_image_views[XRT_MAX_VIEWS], 1401 + const struct xrt_normalized_rect src_rects[XRT_MAX_VIEWS], 1402 + VkImage target_image, 1403 + VkImageView target_image_view, 1404 + const struct render_viewport_data views[XRT_MAX_VIEWS]); 1383 1405 1384 1406 /*! 
1385 1407 * @public @memberof render_compute
+11
src/xrt/compositor/render/render_util.c
··· 124 124 } 125 125 126 126 void 127 + render_calc_time_warp_projection(const struct xrt_fov *fov, struct xrt_matrix_4x4 *result) 128 + { 129 + struct xrt_matrix_4x4_f64 tmp; 130 + calc_projection(fov, &tmp); 131 + 132 + for (int i = 0; i < 16; i++) { 133 + result->v[i] = (float)tmp.v[i]; 134 + } 135 + } 136 + 137 + void 127 138 render_calc_uv_to_tangent_lengths_rect(const struct xrt_fov *fov, struct xrt_normalized_rect *out_rect) 128 139 { 129 140 const struct xrt_fov copy = *fov;
+14 -15
src/xrt/compositor/shaders/distortion.comp
··· 24 24 ivec4 views[2]; 25 25 vec4 pre_transform[2]; 26 26 vec4 post_transform[2]; 27 - mat4 transform[2]; 28 - } ubo; 27 + mat4 transform_timewarp_scanout_begin[2]; 28 + mat4 transform_timewarp_scanout_end[2]; 29 + }ubo; 29 30 30 31 31 32 vec2 position_to_uv(ivec2 extent, uint ix, uint iy) ··· 79 80 values.xy = values.xy * ubo.pre_transform[iz].zw + ubo.pre_transform[iz].xy; 80 81 values.y = -values.y; // Flip to OpenXR coordinate system. 81 82 82 - // Timewarp. 83 - values = ubo.transform[iz] * values; 83 + // Timewarp including scanline timewarp for rolling refresh panels. 84 + values = ubo.transform_timewarp_scanout_begin[iz] * values * (1 - uv.y) + 85 + ubo.transform_timewarp_scanout_end[iz] * values * uv.y; 84 86 values.xy = values.xy * (1.0 / max(values.w, 0.00001)); 85 87 86 88 // From [-1, 1] to [0, 1] 87 89 values.xy = values.xy * 0.5 + 0.5; 88 90 89 - // To deal with OpenGL flip and sub image view. 90 - values.xy = values.xy * ubo.post_transform[iz].zw + ubo.post_transform[iz].xy; 91 - 92 91 // Done. 93 92 return values.xy; 94 93 } ··· 96 95 vec2 transform_uv(vec2 uv, uint iz) 97 96 { 98 97 if (do_timewarp) { 99 - return transform_uv_timewarp(uv, iz); 100 - } else { 101 - return transform_uv_subimage(uv, iz); 98 + uv = transform_uv_timewarp(uv, iz); 102 99 } 100 + 101 + return transform_uv_subimage(uv, iz); 103 102 } 104 103 105 104 void main() ··· 127 126 b_uv = transform_uv(b_uv, iz); 128 127 129 128 // Sample the source with distorted and chromatic-aberration corrected samples. 130 - vec4 colour = vec4( 131 - texture(source[iz], r_uv).r, 132 - texture(source[iz], g_uv).g, 133 - texture(source[iz], b_uv).b, 134 - 1); 129 + vec4 colour = vec4( // 130 + texture(source[iz], r_uv).r, // 131 + texture(source[iz], g_uv).g, // 132 + texture(source[iz], b_uv).b, // 133 + 1); // 135 134 136 135 // Do colour correction here since there are no automatic conversion in hardware available. 137 136 colour = vec4(from_linear_to_srgb(colour.rgb), 1);
+11 -9
src/xrt/compositor/util/comp_high_level_render.c
··· 147 147 148 148 void 149 149 chl_frame_state_cs_set_views(struct chl_frame_state *frame_state, 150 - const struct xrt_pose world_poses[XRT_MAX_VIEWS], 150 + const struct xrt_pose world_poses_scanout_begin[XRT_MAX_VIEWS], 151 + const struct xrt_pose world_poses_scanout_end[XRT_MAX_VIEWS], 151 152 const struct xrt_pose eye_poses[XRT_MAX_VIEWS], 152 153 const struct xrt_fov fovs[XRT_MAX_VIEWS], 153 154 uint32_t layer_count) ··· 172 173 173 174 VkImageView storage_view = comp_scratch_single_images_get_storage_view(scratch_view, scratch_index); 174 175 175 - comp_render_cs_add_squash_view( // 176 - &frame_state->data, // 177 - &world_poses[i], // 178 - &eye_poses[i], // 179 - &fovs[i], // 180 - rsci->image, // squash_image 181 - storage_view, // squash_storage_view 182 - &layer_viewport_data); // squash_viewport_data 176 + comp_render_cs_add_squash_view( // 177 + &frame_state->data, // 178 + &world_poses_scanout_begin[i], // 179 + &world_poses_scanout_end[i], // 180 + &eye_poses[i], // 181 + &fovs[i], // 182 + rsci->image, // squash_image 183 + storage_view, // squash_storage_view 184 + &layer_viewport_data); // squash_viewport_data 183 185 184 186 if (layer_count == 0) { 185 187 frame_state->scratch_state.views[i].used = false;
+11 -8
src/xrt/compositor/util/comp_high_level_render.h
··· 172 172 */ 173 173 void 174 174 chl_frame_state_cs_set_views(struct chl_frame_state *frame_state, 175 - const struct xrt_pose world_pose[XRT_MAX_VIEWS], 175 + const struct xrt_pose world_pose_scanout_begin[XRT_MAX_VIEWS], 176 + const struct xrt_pose world_pose_scanout_end[XRT_MAX_VIEWS], 176 177 const struct xrt_pose eye_pose[XRT_MAX_VIEWS], 177 178 const struct xrt_fov fov[XRT_MAX_VIEWS], 178 179 uint32_t layer_count); ··· 200 201 struct render_compute *render, 201 202 const struct comp_layer *layers, 202 203 uint32_t layer_count, 203 - const struct xrt_pose world_poses[XRT_MAX_VIEWS], 204 + const struct xrt_pose world_poses_scanout_begin[XRT_MAX_VIEWS], 205 + const struct xrt_pose world_poses_scanout_end[XRT_MAX_VIEWS], 204 206 const struct xrt_pose eye_poses[XRT_MAX_VIEWS], 205 207 const struct xrt_fov fovs[XRT_MAX_VIEWS], 206 208 VkImage target_image, 207 209 VkImageView target_storage_view, 208 210 const struct render_viewport_data target_viewport_datas[XRT_MAX_VIEWS]) 209 211 { 210 - chl_frame_state_cs_set_views( // 211 - frame_state, // 212 - world_poses, // 213 - eye_poses, // 214 - fovs, // 215 - layer_count); // 212 + chl_frame_state_cs_set_views( // 213 + frame_state, // 214 + world_poses_scanout_begin, // 215 + world_poses_scanout_end, // 216 + eye_poses, // 217 + fovs, // 218 + layer_count); // 216 219 217 220 chl_frame_state_cs_set_target( // 218 221 frame_state, //
+20 -9
src/xrt/compositor/util/comp_render.h
··· 71 71 */ 72 72 struct comp_render_view_data 73 73 { 74 - //! New world pose of this view. 75 - struct xrt_pose world_pose; 74 + //! New world pose of this view at the beginng of scanout. 75 + struct xrt_pose world_pose_scanout_begin; 76 + 77 + //! New world pose of this view at the end of scanout. 78 + struct xrt_pose world_pose_scanout_end; 76 79 77 80 //! New eye pose of this view. 78 81 struct xrt_pose eye_pose; ··· 250 253 */ 251 254 static inline struct comp_render_view_data * 252 255 comp_render_dispatch_add_squash_view(struct comp_render_dispatch_data *data, 253 - const struct xrt_pose *world_pose, 256 + const struct xrt_pose *world_pose_scanout_begin, 257 + const struct xrt_pose *world_pose_scanout_end, 254 258 const struct xrt_pose *eye_pose, 255 259 const struct xrt_fov *fov, 256 260 VkImage squash_image, ··· 265 269 render_calc_uv_to_tangent_lengths_rect(fov, &view->pre_transform); 266 270 267 271 // Common 268 - view->world_pose = *world_pose; 272 + view->world_pose_scanout_begin = *world_pose_scanout_begin; 273 + view->world_pose_scanout_end = *world_pose_scanout_end; 269 274 view->eye_pose = *eye_pose; 270 275 view->fov = *fov; 271 276 ··· 372 377 { 373 378 struct comp_render_view_data *view = comp_render_dispatch_add_squash_view( // 374 379 data, // 380 + world_pose, // 375 381 world_pose, // 376 382 eye_pose, // 377 383 fov, // ··· 517 523 * Add view to the common data, as required by the CS renderer. 518 524 * 519 525 * @param[in,out] data Common render dispatch data, will be updated 520 - * @param world_pose New world pose of this view. 526 + * @param world_pose_scanout_begin New world pose of this view. 527 + * Populates @ref comp_render_view_data::world_pose 528 + * @param world_pose_scanout_end New world pose of this view. 
521 529 	 * Populates @ref comp_render_view_data::world_pose_scanout_end 522 530 	 * @param eye_pose New eye pose of this view 523 -	 * Populates @ref comp_render_view_data::eye_pose 531 +	 * Populates @ref comp_render_view_data::eye_pose 524 532 	 * @param fov Assigned to fov in the view data, and used to compute @ref comp_render_view_data::pre_transform. 525 533 	 * Populates @ref comp_render_view_data::fov 526 534 	 * @param squash_image Scratch image for this view ··· 537 545 	 */ 538 546 	static inline void 539 547 	comp_render_cs_add_squash_view(struct comp_render_dispatch_data *data, 540 -	const struct xrt_pose *world_pose, 548 +	const struct xrt_pose *world_pose_scanout_begin, 549 +	const struct xrt_pose *world_pose_scanout_end, 541 550 	const struct xrt_pose *eye_pose, 542 551 	const struct xrt_fov *fov, 543 552 	VkImage squash_image, ··· 546 555 	{ 547 556 	struct comp_render_view_data *view = comp_render_dispatch_add_squash_view( // 548 557 	data, // 549 -	world_pose, // 558 +	world_pose_scanout_begin, // 559 +	world_pose_scanout_end, // 550 560 	eye_pose, // 551 561 	fov, // 552 562 	squash_image, // ··· 604 614 	const struct comp_layer *layers, 605 615 	const uint32_t layer_count, 606 616 	const struct xrt_normalized_rect *pre_transform, 607 -	const struct xrt_pose *world_pose, 617 +	const struct xrt_pose *world_pose_scanout_begin, 618 +	const struct xrt_pose *world_pose_scanout_end, 608 619 	const struct xrt_pose *eye_pose, 609 620 	const VkImage target_image, 610 621 	const VkImageView target_image_view,
+125 -95
src/xrt/compositor/util/comp_render_cs.c
··· 185 185 /// Data setup for a projection layer 186 186 static inline void 187 187 do_cs_projection_layer(const struct comp_layer *layer, 188 - const struct xrt_pose *world_pose, 188 + const struct xrt_pose *world_pose_scanout_begin, 189 189 uint32_t view_index, 190 190 uint32_t cur_layer, 191 191 uint32_t cur_image, ··· 235 235 236 236 // unused if timewarp is off 237 237 if (do_timewarp) { 238 - render_calc_time_warp_matrix( // 239 - &vd->pose, // 240 - &vd->fov, // 241 - world_pose, // 242 - &ubo_data->transforms[cur_layer]); // 238 + render_calc_time_warp_matrix( // 239 + &vd->pose, // 240 + &vd->fov, // 241 + world_pose_scanout_begin, // 242 + &ubo_data->transforms_timewarp[cur_layer]); // 243 243 } 244 244 245 245 *out_cur_image = cur_image; ··· 373 373 VkSampler src_samplers[XRT_MAX_VIEWS]; 374 374 struct render_viewport_data target_viewport_datas[XRT_MAX_VIEWS]; 375 375 struct xrt_normalized_rect src_norm_rects[XRT_MAX_VIEWS]; 376 + struct xrt_fov src_fovs[XRT_MAX_VIEWS]; 377 + struct xrt_pose world_poses_scanout_begin[XRT_MAX_VIEWS]; 378 + struct xrt_pose world_poses_scanout_end[XRT_MAX_VIEWS]; 376 379 377 380 for (uint32_t i = 0; i < d->target.view_count; i++) { 378 381 // Data to be filled in. 
··· 390 393 src_norm_rects[i] = src_norm_rect; 391 394 src_samplers[i] = clamp_to_border_black; 392 395 target_viewport_datas[i] = viewport_data; 396 + 397 + if (d->do_timewarp) { 398 + world_poses_scanout_begin[i] = d->views[i].world_pose_scanout_begin; 399 + world_poses_scanout_end[i] = d->views[i].world_pose_scanout_end; 400 + src_fovs[i] = d->views[i].fov; 401 + } 393 402 } 394 403 395 - render_compute_projection( // 396 - render, // 397 - src_samplers, // 398 - src_image_views, // 399 - src_norm_rects, // 400 - d->target.cs.image, // 401 - d->target.cs.storage_view, // target_image_view 402 - target_viewport_datas); // views 404 + if (!d->do_timewarp) { 405 + render_compute_projection_no_timewarp( // 406 + render, // 407 + src_samplers, // 408 + src_image_views, // 409 + src_norm_rects, // 410 + d->target.cs.image, // 411 + d->target.cs.storage_view, // target_image_view 412 + target_viewport_datas); // views 413 + } else { 414 + render_compute_projection_scanout_compensation( // 415 + render, // 416 + src_samplers, // 417 + src_image_views, // 418 + src_norm_rects, // 419 + src_fovs, // 420 + world_poses_scanout_begin, // 421 + world_poses_scanout_end, // 422 + d->target.cs.image, // 423 + d->target.cs.storage_view, // target_image_view 424 + target_viewport_datas); // views 425 + } 403 426 } 404 427 405 428 /// Fast path ··· 427 450 struct xrt_normalized_rect src_norm_rects[XRT_MAX_VIEWS]; 428 451 struct xrt_fov src_fovs[XRT_MAX_VIEWS]; 429 452 struct xrt_pose src_poses[XRT_MAX_VIEWS]; 430 - struct xrt_pose world_poses[XRT_MAX_VIEWS]; 453 + struct xrt_pose world_poses_scanout_begin[XRT_MAX_VIEWS]; 454 + struct xrt_pose world_poses_scanout_end[XRT_MAX_VIEWS]; 431 455 432 456 for (uint32_t i = 0; i < d->target.view_count; i++) { 433 457 // Data to be filled in. 
··· 436 460 struct xrt_normalized_rect src_norm_rect; 437 461 struct xrt_fov src_fov; 438 462 struct xrt_pose src_pose; 439 - struct xrt_pose world_pose; 463 + struct xrt_pose world_pose_scanout_begin; 464 + struct xrt_pose world_pose_scanout_end; 440 465 uint32_t array_index = vds[i]->sub.array_index; 441 466 const struct comp_swapchain_image *image = get_layer_image(layer, i, vds[i]->sub.image_index); 442 467 ··· 446 471 viewport_data = d->views[i].target.viewport_data; 447 472 src_fov = vds[i]->fov; 448 473 src_pose = vds[i]->pose; 449 - world_pose = d->views[i].world_pose; 474 + world_pose_scanout_begin = d->views[i].world_pose_scanout_begin; 475 + world_pose_scanout_end = d->views[i].world_pose_scanout_end; 450 476 451 477 // No layer squasher has handled this for us already 452 478 if (data->flip_y) { ··· 461 487 target_viewport_datas[i] = viewport_data; 462 488 src_fovs[i] = src_fov; 463 489 src_poses[i] = src_pose; 464 - world_poses[i] = world_pose; 490 + world_poses_scanout_begin[i] = world_pose_scanout_begin; 491 + world_poses_scanout_end[i] = world_pose_scanout_end; 465 492 } 466 493 467 494 if (!d->do_timewarp) { 468 - render_compute_projection( // 469 - render, // 470 - src_samplers, // 471 - src_image_views, // 472 - src_norm_rects, // 473 - d->target.cs.image, // 474 - d->target.cs.storage_view, // 475 - target_viewport_datas); // 495 + render_compute_projection_no_timewarp( // 496 + render, // 497 + src_samplers, // 498 + src_image_views, // 499 + src_norm_rects, // 500 + d->target.cs.image, // 501 + d->target.cs.storage_view, // 502 + target_viewport_datas); // 476 503 } else { 477 504 render_compute_projection_timewarp( // 478 505 render, // ··· 481 508 src_norm_rects, // 482 509 src_poses, // 483 510 src_fovs, // 484 - world_poses, // 511 + world_poses_scanout_begin, // 512 + world_poses_scanout_end, // 485 513 d->target.cs.image, // 486 514 d->target.cs.storage_view, // 487 515 target_viewport_datas); // ··· 501 529 const struct comp_layer 
*layers, 502 530 const uint32_t layer_count, 503 531 const struct xrt_normalized_rect *pre_transform, 504 - const struct xrt_pose *world_pose, 532 + const struct xrt_pose *world_pose_scanout_begin, 533 + const struct xrt_pose *world_pose_scanout_end, 505 534 const struct xrt_pose *eye_pose, 506 535 const VkImage target_image, 507 536 const VkImageView target_image_view, ··· 512 541 VkSampler clamp_to_border_black = render->r->samplers.clamp_to_border_black; 513 542 514 543 // Not the transform of the views, but the inverse: actual view matrices. 515 - struct xrt_matrix_4x4 world_view_mat, eye_view_mat; 516 - math_matrix_4x4_view_from_pose(world_pose, &world_view_mat); 517 - math_matrix_4x4_view_from_pose(eye_pose, &eye_view_mat); 544 + struct xrt_matrix_4x4 world_view_mat_scanout_begin, eye_view; 545 + math_matrix_4x4_view_from_pose(world_pose_scanout_begin, &world_view_mat_scanout_begin); 546 + math_matrix_4x4_view_from_pose(eye_pose, &eye_view); 518 547 519 548 struct render_buffer *ubo = &render->r->compute.layer.ubos[view_index]; 520 549 struct render_compute_layer_ubo_data *ubo_data = ubo->mapped; ··· 564 593 565 594 switch (data->type) { 566 595 case XRT_LAYER_CYLINDER: 567 - do_cs_cylinder_layer( // 568 - layer, // layer 569 - &eye_view_mat, // eye_view_mat 570 - &world_view_mat, // world_view_mat 571 - view_index, // view_index 572 - cur_layer, // cur_layer 573 - cur_image, // cur_image 574 - clamp_to_edge, // clamp_to_edge 575 - clamp_to_border_black, // clamp_to_border_black 576 - src_samplers, // src_samplers 577 - src_image_views, // src_image_views 578 - ubo_data, // ubo_data 579 - &cur_image); // out_cur_image 596 + do_cs_cylinder_layer( // 597 + layer, // layer 598 + &eye_view, // eye_view_mat 599 + &world_view_mat_scanout_begin, // world_view_mat 600 + view_index, // view_index 601 + cur_layer, // cur_layer 602 + cur_image, // cur_image 603 + clamp_to_edge, // clamp_to_edge 604 + clamp_to_border_black, // clamp_to_border_black 605 + src_samplers, // 
src_samplers 606 + src_image_views, // src_image_views 607 + ubo_data, // ubo_data 608 + &cur_image); // out_cur_image 580 609 break; 581 610 case XRT_LAYER_EQUIRECT2: 582 - do_cs_equirect2_layer( // 583 - layer, // layer 584 - &eye_view_mat, // eye_view_mat 585 - &world_view_mat, // world_view_mat 586 - view_index, // view_index 587 - cur_layer, // cur_layer 588 - cur_image, // cur_image 589 - clamp_to_edge, // clamp_to_edge 590 - clamp_to_border_black, // clamp_to_border_black 591 - src_samplers, // src_samplers 592 - src_image_views, // src_image_views 593 - ubo_data, // ubo_data 594 - &cur_image); // out_cur_image 611 + do_cs_equirect2_layer( // 612 + layer, // layer 613 + &eye_view, // eye_view_mat 614 + &world_view_mat_scanout_begin, // world_view_mat 615 + view_index, // view_index 616 + cur_layer, // cur_layer 617 + cur_image, // cur_image 618 + clamp_to_edge, // clamp_to_edge 619 + clamp_to_border_black, // clamp_to_border_black 620 + src_samplers, // src_samplers 621 + src_image_views, // src_image_views 622 + ubo_data, // ubo_data 623 + &cur_image); // out_cur_image 595 624 break; 596 625 case XRT_LAYER_PROJECTION_DEPTH: 597 626 case XRT_LAYER_PROJECTION: { 598 - do_cs_projection_layer( // 599 - layer, // layer 600 - world_pose, // world_pose 601 - view_index, // view_index 602 - cur_layer, // cur_layer 603 - cur_image, // cur_image 604 - clamp_to_edge, // clamp_to_edge 605 - clamp_to_border_black, // clamp_to_border_black 606 - src_samplers, // src_samplers 607 - src_image_views, // src_image_views 608 - ubo_data, // ubo_data 609 - do_timewarp, // do_timewarp 610 - &cur_image); // out_cur_image 627 + do_cs_projection_layer( // 628 + layer, // layer 629 + world_pose_scanout_begin, // world_pose_scanout_begin 630 + view_index, // view_index 631 + cur_layer, // cur_layer 632 + cur_image, // cur_image 633 + clamp_to_edge, // clamp_to_edge 634 + clamp_to_border_black, // clamp_to_border_black 635 + src_samplers, // src_samplers 636 + src_image_views, // 
src_image_views 637 + ubo_data, // ubo_data 638 + do_timewarp, // do_timewarp 639 + &cur_image); // out_cur_image 611 640 } break; 612 641 case XRT_LAYER_QUAD: { 613 - do_cs_quad_layer( // 614 - layer, // layer 615 - &eye_view_mat, // eye_view_mat 616 - &world_view_mat, // world_view_mat 617 - view_index, // view_index 618 - cur_layer, // cur_layer 619 - cur_image, // cur_image 620 - clamp_to_edge, // clamp_to_edge 621 - clamp_to_border_black, // clamp_to_border_black 622 - src_samplers, // src_samplers 623 - src_image_views, // src_image_views 624 - ubo_data, // ubo_data 625 - &cur_image); // out_cur_image 642 + do_cs_quad_layer( // 643 + layer, // layer 644 + &eye_view, // eye_view_mat 645 + &world_view_mat_scanout_begin, // world_view_mat_scanout_begin 646 + view_index, // view_index 647 + cur_layer, // cur_layer 648 + cur_image, // cur_image 649 + clamp_to_edge, // clamp_to_edge 650 + clamp_to_border_black, // clamp_to_border_black 651 + src_samplers, // src_samplers 652 + src_image_views, // src_image_views 653 + ubo_data, // ubo_data 654 + &cur_image); // out_cur_image 626 655 } break; 627 656 default: 628 657 // Should not get here! 
··· 687 716 for (uint32_t view_index = 0; view_index < d->squash_view_count; view_index++) { 688 717 const struct comp_render_view_data *view = &d->views[view_index]; 689 718 690 - comp_render_cs_layer( // 691 - render, // 692 - view_index, // 693 - layers, // 694 - layer_count, // 695 - &view->pre_transform, // 696 - &view->world_pose, // 697 - &view->eye_pose, // 698 - view->squash.image, // 699 - view->squash.cs.storage_view, // 700 - &view->squash.viewport_data, // 701 - d->do_timewarp); // 719 + comp_render_cs_layer( // 720 + render, // 721 + view_index, // 722 + layers, // 723 + layer_count, // 724 + &view->pre_transform, // 725 + &view->world_pose_scanout_begin, // 726 + &view->world_pose_scanout_end, // 727 + &view->eye_pose, // 728 + view->squash.image, // 729 + view->squash.cs.storage_view, // 730 + &view->squash.viewport_data, // 731 + d->do_timewarp); // 702 732 } 703 733 704 734 cmd_barrier_view_squash_images( //
+7 -7
src/xrt/compositor/util/comp_render_gfx.c
··· 531 531 if (do_timewarp) { 532 532 data.pre_transform = d->views[i].pre_transform; 533 533 534 - render_calc_time_warp_matrix( // 535 - &md->views[i].src_pose, // 536 - &md->views[i].src_fov, // 537 - &d->views[i].world_pose, // 538 - &data.transform); // 534 + render_calc_time_warp_matrix( // 535 + &md->views[i].src_pose, // 536 + &md->views[i].src_fov, // 537 + &d->views[i].world_pose_scanout_begin, // 538 + &data.transform); // 539 539 } 540 540 541 541 ret = render_gfx_mesh_alloc_and_write( // ··· 596 596 597 597 struct gfx_mesh_data md = XRT_STRUCT_INIT; 598 598 for (uint32_t i = 0; i < d->target.view_count; i++) { 599 - struct xrt_pose src_pose = d->views[i].world_pose; 599 + struct xrt_pose src_pose = d->views[i].world_pose_scanout_begin; 600 600 struct xrt_fov src_fov = d->views[i].fov; 601 601 VkImageView src_image_view = d->views[i].squash_as_src.sample_view; 602 602 struct xrt_normalized_rect src_norm_rect = d->views[i].squash_as_src.norm_rect; ··· 693 693 for (uint32_t view = 0; view < d->squash_view_count; view++) { 694 694 695 695 // Data for this view, convenience. 696 - const struct xrt_pose world_pose = d->views[view].world_pose; 696 + const struct xrt_pose world_pose = d->views[view].world_pose_scanout_begin; 697 697 const struct xrt_pose eye_pose = d->views[view].eye_pose; 698 698 const struct xrt_fov new_fov = d->views[view].fov; 699 699
+2
src/xrt/drivers/android/android_sensors.c
··· 392 392 } 393 393 394 394 d->base.hmd->screens[0].nominal_frame_interval_ns = time_s_to_ns(1.0f / metrics.refresh_rate); 395 + d->base.hmd->screens[0].scanout_direction = XRT_SCANOUT_DIRECTION_NONE; 396 + d->base.hmd->screens[0].scanout_time_ns = 0; 395 397 396 398 const uint32_t w_pixels = metrics.width_pixels; 397 399 const uint32_t h_pixels = metrics.height_pixels;
+2
src/xrt/drivers/blubur_s1/blubur_s1_hmd.c
··· 543 543 hmd->base.hmd->screens[0].w_pixels = PANEL_WIDTH; 544 544 hmd->base.hmd->screens[0].h_pixels = VIEW_SIZE; 545 545 hmd->base.hmd->screens[0].nominal_frame_interval_ns = 1000000000LLU / 120; // 120hz 546 + hmd->base.hmd->screens[0].scanout_direction = XRT_SCANOUT_DIRECTION_NONE; 547 + hmd->base.hmd->screens[0].scanout_time_ns = 0; 546 548 547 549 hmd->base.hmd->view_count = 2; 548 550 hmd->base.hmd->views[0] = (struct xrt_view){
+3
src/xrt/drivers/hdk/hdk_device.cpp
··· 355 355 hd->base.hmd->distortion.fov[0].angle_right = -hd->base.hmd->distortion.fov[1].angle_left; 356 356 } 357 357 358 + hd->base.hmd->screens[0].scanout_direction = XRT_SCANOUT_DIRECTION_NONE; 359 + hd->base.hmd->screens[0].scanout_time_ns = 0; 360 + 358 361 switch (variant) { 359 362 case HDK_UNKNOWN: assert(!"unknown device"); break; 360 363
+2
src/xrt/drivers/ohmd/oh_device.c
··· 800 800 ohd->base.hmd->screens[0].w_pixels = info.display.w_pixels; 801 801 ohd->base.hmd->screens[0].h_pixels = info.display.h_pixels; 802 802 ohd->base.hmd->screens[0].nominal_frame_interval_ns = info.display.nominal_frame_interval_ns; 803 + ohd->base.hmd->screens[0].scanout_direction = XRT_SCANOUT_DIRECTION_NONE; 804 + ohd->base.hmd->screens[0].scanout_time_ns = 0; 803 805 804 806 // Left 805 807 ohd->base.hmd->views[0].display.w_pixels = info.views[0].display.w_pixels;
+2
src/xrt/drivers/rift/rift_hmd.c
··· 624 624 625 625 // Set up display details 626 626 hmd->base.hmd->screens[0].nominal_frame_interval_ns = time_s_to_ns(1.0f / 75.0f); 627 + hmd->base.hmd->screens[0].scanout_direction = XRT_SCANOUT_DIRECTION_NONE; 628 + hmd->base.hmd->screens[0].scanout_time_ns = 0; 627 629 628 630 hmd->extra_display_info.icd = MICROMETERS_TO_METERS(hmd->display_info.lens_separation); 629 631
+2
src/xrt/drivers/rokid/rokid_hmd.c
··· 496 496 // Set up display details 497 497 // refresh rate 498 498 rokid->base.hmd->screens[0].nominal_frame_interval_ns = time_s_to_ns(1.0f / 60.0f); 499 + rokid->base.hmd->screens[0].scanout_direction = XRT_SCANOUT_DIRECTION_NONE; 500 + rokid->base.hmd->screens[0].scanout_time_ns = 0; 499 501 500 502 const float quarter_vFOV = 0.25f * (is_rokid_max ? 46.0f : 40.0f) * ((float)M_PI / 180.0f); 501 503 const float quarter_hFOV = quarter_vFOV * 16.0f / 9.0f;
+2
src/xrt/drivers/sample/sample_hmd.c
··· 207 207 // Set up display details 208 208 // refresh rate 209 209 hmd->base.hmd->screens[0].nominal_frame_interval_ns = time_s_to_ns(1.0f / 90.0f); 210 + hmd->base.hmd->screens[0].scanout_direction = XRT_SCANOUT_DIRECTION_NONE; 211 + hmd->base.hmd->screens[0].scanout_time_ns = 0; 210 212 211 213 const double hFOV = 90 * (M_PI / 180.0); 212 214 const double vFOV = 96.73 * (M_PI / 180.0);
+2
src/xrt/drivers/simula/svr_hmd.c
··· 245 245 svr->base.device_type = XRT_DEVICE_TYPE_HMD; 246 246 247 247 svr->base.hmd->screens[0].nominal_frame_interval_ns = (uint64_t)time_s_to_ns(1.0f / 90.0f); 248 + svr->base.hmd->screens[0].scanout_direction = XRT_SCANOUT_DIRECTION_NONE; 249 + svr->base.hmd->screens[0].scanout_time_ns = 0; 248 250 249 251 250 252 // Print name.
+27 -1
src/xrt/drivers/steamvr_lh/device.cpp
··· 867 867 } 868 868 } 869 869 870 + void 871 + HmdDevice::set_scanout_type(xrt_scanout_direction direction, uint64_t time_ns) 872 + { 873 + auto set = [this, direction, time_ns] { 874 + hmd_parts->base.screens[0].scanout_direction = direction; 875 + hmd_parts->base.screens[0].scanout_time_ns = time_ns; 876 + }; 877 + 878 + if (hmd_parts) { 879 + set(); 880 + } else { 881 + std::thread t([this, set] { 882 + std::unique_lock lk(hmd_parts_mut); 883 + hmd_parts_cv.wait(lk, [this] { return hmd_parts != nullptr; }); 884 + set(); 885 + }); 886 + t.detach(); 887 + } 888 + } 889 + 870 890 namespace { 871 891 // From openvr driver documentation 872 892 // (https://github.com/ValveSoftware/openvr/blob/master/docs/Driver_API_Documentation.md#Input-Profiles): ··· 1027 1047 case vr::Prop_DisplayFrequency_Float: { 1028 1048 assert(prop.unBufferSize == sizeof(float)); 1029 1049 float freq = *static_cast<float *>(prop.pvBuffer); 1030 - set_nominal_frame_interval((1.f / freq) * 1e9f); 1050 + uint64_t interval_ns = (1.f / freq) * 1e9f; 1051 + set_nominal_frame_interval(interval_ns); 1052 + if (variant == VIVE_VARIANT_PRO) { 1053 + set_scanout_type(XRT_SCANOUT_DIRECTION_TOP_TO_BOTTOM, interval_ns * 1600.0 / 1624.0); 1054 + } else { 1055 + set_scanout_type(XRT_SCANOUT_DIRECTION_NONE, 0); 1056 + } 1031 1057 break; 1032 1058 } 1033 1059 case vr::Prop_UserIpdMeters_Float: {
+3
src/xrt/drivers/steamvr_lh/device.hpp
··· 210 210 void 211 211 set_nominal_frame_interval(uint64_t interval_ns); 212 212 213 + void 214 + set_scanout_type(enum xrt_scanout_direction direction, uint64_t time_ns); 215 + 213 216 std::condition_variable hmd_parts_cv; 214 217 std::mutex hmd_parts_mut; 215 218 float brightness{1.0f};
+10
src/xrt/drivers/survive/survive_driver.c
··· 979 979 survive->base.hmd->screens[0].nominal_frame_interval_ns = (uint64_t)time_s_to_ns(1.0f / 90.0f); 980 980 } 981 981 982 + if (survive->hmd.config.variant == VIVE_VARIANT_PRO) { 983 + survive->base.hmd->screens[0].scanout_direction = XRT_SCANOUT_DIRECTION_TOP_TO_BOTTOM; 984 + survive->base.hmd->screens[0].scanout_time_ns = survive->base.hmd->screens[0].nominal_frame_interval_ns; 985 + // Compensate for the length of vblank. 986 + survive->base.hmd->screens[0].scanout_time_ns *= 1600.0 / 1624.0; 987 + } else { 988 + survive->base.hmd->screens[0].scanout_direction = XRT_SCANOUT_DIRECTION_NONE; 989 + survive->base.hmd->screens[0].scanout_time_ns = 0; 990 + } 991 + 982 992 for (uint8_t eye = 0; eye < 2; eye++) { 983 993 struct xrt_view *v = &survive->base.hmd->views[eye]; 984 994 v->display.w_pixels = w_pixels;
+10
src/xrt/drivers/vive/vive_device.c
··· 1157 1157 d->base.hmd->screens[0].nominal_frame_interval_ns = (uint64_t)time_s_to_ns(1.0f / 90.0f); 1158 1158 } 1159 1159 1160 + if (d->config.variant == VIVE_VARIANT_PRO) { 1161 + d->base.hmd->screens[0].scanout_direction = XRT_SCANOUT_DIRECTION_TOP_TO_BOTTOM; 1162 + d->base.hmd->screens[0].scanout_time_ns = d->base.hmd->screens[0].nominal_frame_interval_ns; 1163 + // Compensate for the length of vblank. 1164 + d->base.hmd->screens[0].scanout_time_ns *= 1600.0 / 1624.0; 1165 + } else { 1166 + d->base.hmd->screens[0].scanout_direction = XRT_SCANOUT_DIRECTION_NONE; 1167 + d->base.hmd->screens[0].scanout_time_ns = 0; 1168 + } 1169 + 1160 1170 for (uint8_t eye = 0; eye < 2; eye++) { 1161 1171 struct xrt_view *v = &d->base.hmd->views[eye]; 1162 1172 v->display.w_pixels = w_pixels;
+1
src/xrt/drivers/xreal_air/xreal_air_hmd.c
··· 1198 1198 1199 1199 // Set up display details refresh rate 1200 1200 hmd->base.hmd->screens[0].nominal_frame_interval_ns = time_s_to_ns(1.0f / 60.0f); 1201 + hmd->base.hmd->screens[0].scanout_time_ns = 0; 1201 1202 1202 1203 // Distortion information, fills in xdev->compute_distortion(). 1203 1204 u_distortion_mesh_set_none(&hmd->base);
+14
src/xrt/include/xrt/xrt_defines.h
··· 172 172 }; 173 173 174 174 /*! 175 + * Screen scanout direction 176 + */ 177 + enum xrt_scanout_direction 178 + { 179 + // clang-format off 180 + XRT_SCANOUT_DIRECTION_NONE = 0, 181 + XRT_SCANOUT_DIRECTION_TOP_TO_BOTTOM, 182 + XRT_SCANOUT_DIRECTION_BOTTOM_TO_TOP, 183 + XRT_SCANOUT_DIRECTION_LEFT_TO_RIGHT, 184 + XRT_SCANOUT_DIRECTION_RIGHT_TO_LEFT, 185 + // clang-format on 186 + }; 187 + 188 + /*! 175 189 * Common formats, use `u_format_*` functions to reason about them. 176 190 */ 177 191 enum xrt_format
+2
src/xrt/include/xrt/xrt_device.h
··· 102 102 int h_pixels; 103 103 //! Nominal frame interval 104 104 uint64_t nominal_frame_interval_ns; 105 + enum xrt_scanout_direction scanout_direction; 106 + uint64_t scanout_time_ns; 105 107 } screens[1]; 106 108 107 109 /*!
+1
src/xrt/targets/sdl_test/sdl_device.c
··· 115 115 116 116 // Refresh rate. 117 117 xdev->hmd->screens[0].nominal_frame_interval_ns = time_s_to_ns(1.0f / 60.0f); 118 + xdev->hmd->screens[0].scanout_time_ns = 0; 118 119 119 120 // Blend mode(s), setup after u_device_setup_split_side_by_side. 120 121 xdev->hmd->blend_modes[0] = XRT_BLEND_MODE_OPAQUE;