···11+// Copyright 2019-2024, Collabora, Ltd.
22+// Copyright 2024-2025, NVIDIA CORPORATION.
33+// SPDX-License-Identifier: BSL-1.0
44+/*!
55+ * @file
66+ * @brief Higher level interface for scratch images.
77+ * @author Jakob Bornecrantz <tbornecrantz@nvidia.com>
88+ * @author Andrei Aristarkhov <aaristarkhov@nvidia.com>
99+ * @author Gareth Morgan <gmorgan@nvidia.com>
1010+ * @author Rylie Pavlik <rylie.pavlik@collabora.com>
1111+ * @ingroup comp_main
1212+ */
1313+1414+1515+#include "comp_high_level_scratch.h"
1616+1717+1818+void
1919+chl_scratch_init(struct chl_scratch *scratch)
2020+{
2121+ for (uint32_t i = 0; i < ARRAY_SIZE(scratch->views); i++) {
2222+ comp_scratch_single_images_init(&scratch->views[i].cssi);
2323+ }
2424+}
2525+2626+void
2727+chl_scratch_fini(struct chl_scratch *scratch)
2828+{
2929+ for (uint32_t i = 0; i < ARRAY_SIZE(scratch->views); i++) {
3030+ comp_scratch_single_images_destroy(&scratch->views[i].cssi);
3131+ }
3232+}
3333+3434+bool
3535+chl_scratch_ensure(struct chl_scratch *scratch,
3636+ struct render_resources *rr,
3737+ uint32_t view_count,
3838+ VkExtent2D extent,
3939+ const VkFormat format)
4040+{
4141+ struct vk_bundle *vk = rr->vk;
4242+ bool bret = false;
4343+4444+ // Is everything already correct?
4545+ if (scratch->view_count == view_count && //
4646+ scratch->extent.width == extent.width && //
4747+ scratch->extent.height == extent.height && //
4848+ scratch->format == format) {
4949+ return true;
5050+ }
5151+5252+ // Free all old resources.
5353+ chl_scratch_free_resources(scratch, rr);
5454+5555+ // Shared render pass between all scratch images.
5656+ bret = render_gfx_render_pass_init( //
5757+ &scratch->render_pass, // rgrp
5858+ rr, // struct render_resources
5959+ format, // format
6060+ VK_ATTACHMENT_LOAD_OP_CLEAR, // load_op
6161+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL); // final_layout
6262+ if (!bret) {
6363+ VK_ERROR(vk, "render_gfx_render_pass_init: false");
6464+ return false;
6565+ }
6666+6767+ // Need to track if the render pass has been initialized.
6868+ scratch->render_pass_initialized = true;
6969+7070+ for (uint32_t i = 0; i < view_count; i++) {
7171+ // Helper.
7272+ struct comp_scratch_single_images *cssi = &scratch->views[i].cssi;
7373+7474+ if (format == VK_FORMAT_R8G8B8A8_SRGB) {
7575+ // Special creation function for the mutable format.
7676+ bret = comp_scratch_single_images_ensure_mutable(cssi, vk, extent);
7777+ } else {
7878+ bret = comp_scratch_single_images_ensure(cssi, vk, extent, format);
7979+ }
8080+8181+ if (!bret) {
8282+ VK_ERROR(vk, "comp_scratch_single_images_ensure[_mutable]: false");
8383+ // Free any that has already been allocated.
8484+ chl_scratch_free_resources(scratch, rr);
8585+ return false;
8686+ }
8787+8888+ for (uint32_t k = 0; k < COMP_SCRATCH_NUM_IMAGES; k++) {
8989+9090+ /*
9191+ * For graphics parts we use the same image view as the
9292+ * source. In other words the sRGB image view for the
9393+ * non-linear formats.
9494+ */
9595+ VkImageView target_image_view = chl_scratch_get_sample_view(scratch, i, k);
9696+9797+ render_gfx_target_resources_init( //
9898+ &scratch->views[i].targets[k], // rtr
9999+ rr, // struct render_resources
100100+ &scratch->render_pass, // struct render_gfx_render_pass
101101+ target_image_view, // target
102102+ extent); // extent
103103+ }
104104+105105+ /*
106106+ * Update the count, doing it this way means free_resources
107107+ * will free the allocated images correctly. The count is one
108108+ * more then the index.
109109+ */
110110+ scratch->view_count = i + 1;
111111+ }
112112+113113+ // Update the cached values.
114114+ scratch->extent = extent;
115115+ scratch->format = format;
116116+117117+ return true;
118118+}
119119+120120+void
121121+chl_scratch_free_resources(struct chl_scratch *scratch, struct render_resources *rr)
122122+{
123123+ struct vk_bundle *vk = rr->vk;
124124+125125+ for (uint32_t i = 0; i < scratch->view_count; i++) {
126126+ for (uint32_t k = 0; k < COMP_SCRATCH_NUM_IMAGES; k++) {
127127+ render_gfx_target_resources_fini(&scratch->views[i].targets[k]);
128128+ }
129129+130130+ comp_scratch_single_images_free(&scratch->views[i].cssi, vk);
131131+ }
132132+133133+ // Nothing allocated.
134134+ scratch->view_count = 0;
135135+ scratch->extent.width = 0;
136136+ scratch->extent.height = 0;
137137+ scratch->format = VK_FORMAT_UNDEFINED;
138138+139139+ // Do this after the image targets as they reference the render pass.
140140+ if (scratch->render_pass_initialized) {
141141+ render_gfx_render_pass_fini(&scratch->render_pass);
142142+ scratch->render_pass_initialized = false;
143143+ }
144144+}
+200
src/xrt/compositor/util/comp_high_level_scratch.h
···11+// Copyright 2019-2024, Collabora, Ltd.
22+// Copyright 2024-2025, NVIDIA CORPORATION.
33+// SPDX-License-Identifier: BSL-1.0
44+/*!
55+ * @file
66+ * @brief Higher level interface for scratch images.
77+ * @author Jakob Bornecrantz <tbornecrantz@nvidia.com>
88+ * @ingroup comp_util
99+ */
1010+1111+#pragma once
1212+1313+1414+#include "render/render_interface.h"
1515+1616+#include "comp_scratch.h"
1717+1818+1919+#ifdef __cplusplus
2020+extern "C" {
2121+#endif
/*!
 * Scratch images that can be used for staging buffers.
 *
 * @ingroup comp_util
 */
struct chl_scratch
{
	//! Shared render pass for the views.
	struct render_gfx_render_pass render_pass;

	struct
	{
		//! Per-view scratch images.
		struct comp_scratch_single_images cssi;

		//! Targets for rendering to the scratch buffer.
		struct render_gfx_target_resources targets[COMP_SCRATCH_NUM_IMAGES];
	} views[XRT_MAX_VIEWS];

	/*!
	 * Number of views that have been ensured and have Vulkan resources,
	 * all comp_scratch_single_images are always inited.
	 */
	uint32_t view_count;

	//! The extent used to create the images.
	VkExtent2D extent;

	//! Format requested.
	VkFormat format;

	//! Has the render pass been initialized.
	bool render_pass_initialized;
};
/*!
 * Must be called before use and before the scratch images are registered with
 * the u_var system.
 *
 * @memberof chl_scratch
 */
void
chl_scratch_init(struct chl_scratch *scratch);
/*!
 * @ref chl_scratch_free_resources must be called manually before calling this
 * function, and the scratch images unregistered from the u_var system.
 *
 * @memberof chl_scratch
 */
void
chl_scratch_fini(struct chl_scratch *scratch);
/*!
 * Ensure the scratch images and the render target resources are created,
 * (re)creating them when the view count, extent or format differs from the
 * cached values.
 *
 * @memberof chl_scratch
 */
bool
chl_scratch_ensure(struct chl_scratch *scratch,
                   struct render_resources *rr,
                   uint32_t view_count,
                   VkExtent2D extent,
                   const VkFormat format);
/*!
 * Free all Vulkan resources that this scratch has created, including the
 * shared render pass; also resets the cached extent, format and view count.
 *
 * @memberof chl_scratch
 */
void
chl_scratch_free_resources(struct chl_scratch *scratch, struct render_resources *rr);
9595+9696+/*!
9797+ * Get the image, see @ref comp_scratch_single_images_get_image.
9898+ *
9999+ * @memberof chl_scratch
100100+ */
101101+static inline VkImage
102102+chl_scratch_get_image(struct chl_scratch *scratch, uint32_t view_index, uint32_t image_index)
103103+{
104104+ return comp_scratch_single_images_get_image(&scratch->views[view_index].cssi, image_index);
105105+}
106106+107107+/*!
108108+ * Get the sample view, see @ref comp_scratch_single_images_get_sample_view.
109109+ *
110110+ * @memberof chl_scratch
111111+ */
112112+static inline VkImageView
113113+chl_scratch_get_sample_view(struct chl_scratch *scratch, uint32_t view_index, uint32_t image_index)
114114+{
115115+ return comp_scratch_single_images_get_sample_view(&scratch->views[view_index].cssi, image_index);
116116+}
117117+118118+/*!
119119+ * Get the storage view, see @ref comp_scratch_single_images_get_storage_view.
120120+ *
121121+ * @memberof chl_scratch
122122+ */
123123+static inline VkImageView
124124+chl_scratch_get_storage_view(struct chl_scratch *scratch, uint32_t view_index, uint32_t image_index)
125125+{
126126+ return comp_scratch_single_images_get_storage_view(&scratch->views[view_index].cssi, image_index);
127127+}
/*
 *
 * State
 *
 */

/*!
 * Per view frame state tracking which index was gotten and if it was used.
 *
 * @ingroup comp_util
 */
struct chl_scratch_state_view
{
	//! Image index handed out by @ref comp_scratch_single_images_get.
	uint32_t index;

	//! Set by the user once the image at @p index has actually been used.
	bool used;
};
147147+148148+/*!
149149+ * Used to track the index of images gotten for the images, and if it has been
150150+ * used. The user will need to mark images as used.
151151+ *
152152+ * @ingroup comp_util
153153+ */
154154+struct chl_scratch_state
155155+{
156156+ struct chl_scratch_state_view views[XRT_MAX_VIEWS];
157157+};
158158+159159+/*!
160160+ * Zeros out the struct and calls get on all the images, setting the @p index
161161+ * field on the state for each view.
162162+ *
163163+ * @memberof chl_scratch_state
164164+ */
165165+static inline void
166166+chl_scratch_state_init_and_get(struct chl_scratch_state *scratch_state, struct chl_scratch *scratch)
167167+{
168168+ U_ZERO(scratch_state);
169169+170170+ // Loop over all the of the images in the scratch view.
171171+ for (uint32_t i = 0; i < scratch->view_count; i++) {
172172+ comp_scratch_single_images_get(&scratch->views[i].cssi, &scratch_state->views[i].index);
173173+ }
174174+}
175175+176176+/*!
177177+ * Calls discard or done on all the scratch images, it calls done if the @p used
178178+ * field is set to true.
179179+ *
180180+ * @memberof chl_scratch_state
181181+ */
182182+static inline void
183183+chl_scratch_state_discard_or_done(struct chl_scratch_state *scratch_state, struct chl_scratch *scratch)
184184+{
185185+ // Loop over all the of the images in the scratch view.
186186+ for (uint32_t i = 0; i < scratch->view_count; i++) {
187187+ if (scratch_state->views[i].used) {
188188+ comp_scratch_single_images_done(&scratch->views[i].cssi);
189189+ } else {
190190+ comp_scratch_single_images_discard(&scratch->views[i].cssi);
191191+ }
192192+ }
193193+194194+ U_ZERO(scratch_state);
195195+}
196196+197197+198198+#ifdef __cplusplus
199199+}
200200+#endif