render/vulkan: move blend-to-output resources to render_xxx_pass

This commit is contained in:
YaoBing Xiao 2026-03-19 10:39:12 +08:00
parent e78131f3d3
commit 6473365361
5 changed files with 264 additions and 160 deletions

View file

@ -302,31 +302,8 @@ struct wlr_vk_renderer {
VkCommandPool command_pool; VkCommandPool command_pool;
VkShaderModule vert_module;
VkShaderModule tex_frag_module;
VkShaderModule quad_frag_module;
VkShaderModule output_module;
struct wl_list pipeline_layouts; // struct wlr_vk_pipeline_layout.link struct wl_list pipeline_layouts; // struct wlr_vk_pipeline_layout.link
// for blend->output subpass
VkPipelineLayout output_pipe_layout;
VkDescriptorSetLayout output_ds_srgb_layout;
VkDescriptorSetLayout output_ds_lut3d_layout;
VkSampler output_sampler_lut3d;
// descriptor set indicating dummy 1x1x1 image, for use in the lut3d slot
VkDescriptorSet output_ds_lut3d_dummy;
struct wlr_vk_descriptor_pool *output_ds_lut3d_dummy_pool;
size_t last_output_pool_size;
struct wl_list output_descriptor_pools; // wlr_vk_descriptor_pool.link
// dummy sampler to bind when output shader is not using a lookup table
VkImage dummy3d_image;
VkDeviceMemory dummy3d_mem;
VkImageView dummy3d_image_view;
bool dummy3d_image_transitioned;
VkSemaphore timeline_semaphore; VkSemaphore timeline_semaphore;
uint64_t timeline_point; uint64_t timeline_point;
@ -392,7 +369,8 @@ struct wlr_vk_texture_view {
struct wlr_vk_pipeline *setup_get_or_create_pipeline( struct wlr_vk_pipeline *setup_get_or_create_pipeline(
struct wlr_vk_render_format_setup *setup, struct wlr_vk_render_format_setup *setup,
const struct wlr_vk_pipeline_key *key); const struct wlr_vk_pipeline_key *key,
VkShaderModule vert_module, VkShaderModule frag_module);
struct wlr_vk_pipeline_layout *get_or_create_pipeline_layout( struct wlr_vk_pipeline_layout *get_or_create_pipeline_layout(
struct wlr_vk_renderer *renderer, struct wlr_vk_renderer *renderer,
const struct wlr_vk_pipeline_layout_key *key); const struct wlr_vk_pipeline_layout_key *key);
@ -468,6 +446,7 @@ void vulkan_free_ds(struct wlr_vk_renderer *renderer,
struct wlr_vk_format_props *vulkan_format_props_from_drm( struct wlr_vk_format_props *vulkan_format_props_from_drm(
struct wlr_vk_device *dev, uint32_t drm_format); struct wlr_vk_device *dev, uint32_t drm_format);
struct wlr_vk_renderer *vulkan_get_renderer(struct wlr_renderer *r); struct wlr_vk_renderer *vulkan_get_renderer(struct wlr_renderer *r);
VkShaderModule vulkan_create_common_vert_module(struct wlr_vk_renderer *renderer);
struct wlr_vk_command_buffer *vulkan_acquire_command_buffer( struct wlr_vk_command_buffer *vulkan_acquire_command_buffer(
struct wlr_vk_renderer *renderer); struct wlr_vk_renderer *renderer);
@ -594,6 +573,11 @@ void vulkan_change_layout(VkCommandBuffer cb, VkImage img,
struct wlr_vk_render_rect_pass { struct wlr_vk_render_rect_pass {
struct wlr_render_rect_pass base; struct wlr_render_rect_pass base;
struct wlr_vk_renderer *renderer;
struct {
VkShaderModule vert_module;
VkShaderModule frag_module;
} shader;
}; };
bool wlr_render_rect_pass_is_vk(const struct wlr_render_rect_pass *rect_pass); bool wlr_render_rect_pass_is_vk(const struct wlr_render_rect_pass *rect_pass);
@ -602,6 +586,11 @@ struct wlr_vk_render_rect_pass *wlr_vk_render_rect_pass_from_pass(
struct wlr_vk_render_texture_pass { struct wlr_vk_render_texture_pass {
struct wlr_render_texture_pass base; struct wlr_render_texture_pass base;
struct wlr_vk_renderer *renderer;
struct {
VkShaderModule vert_module;
VkShaderModule frag_module;
} shader;
}; };
bool wlr_render_texture_pass_is_vk(const struct wlr_render_texture_pass *texture_pass); bool wlr_render_texture_pass_is_vk(const struct wlr_render_texture_pass *texture_pass);
@ -610,10 +599,36 @@ struct wlr_vk_render_texture_pass *wlr_vk_render_texture_pass_from_pass(
struct wlr_vk_render_submit_pass { struct wlr_vk_render_submit_pass {
struct wlr_render_submit_pass base; struct wlr_render_submit_pass base;
struct wlr_vk_renderer *renderer;
struct {
VkShaderModule vert_module;
VkShaderModule frag_module;
VkPipelineLayout pipe_layout;
VkDescriptorSetLayout ds_srgb_layout;
VkDescriptorSetLayout ds_lut3d_layout;
VkSampler sampler_lut3d;
// descriptor set indicating dummy 1x1x1 image, for use in the lut3d slot
VkDescriptorSet ds_lut3d_dummy;
struct wlr_vk_descriptor_pool *ds_lut3d_dummy_pool;
size_t last_pool_size;
struct wl_list descriptor_pools; // wlr_vk_descriptor_pool.link
// dummy sampler to bind when output shader is not using a lookup table
VkImage dummy3d_image;
VkDeviceMemory dummy3d_mem;
VkImageView dummy3d_image_view;
bool dummy3d_image_transitioned;
} output;
}; };
bool wlr_render_submit_pass_is_vk(const struct wlr_render_submit_pass *submit_pass); bool wlr_render_submit_pass_is_vk(const struct wlr_render_submit_pass *submit_pass);
struct wlr_vk_render_submit_pass *wlr_vk_render_submit_pass_from_pass( struct wlr_vk_render_submit_pass *wlr_vk_render_submit_pass_from_pass(
struct wlr_render_submit_pass *submit_pass); struct wlr_render_submit_pass *submit_pass);
bool vulkan_init_submit_pass_output(struct wlr_vk_renderer *renderer,
struct wlr_vk_render_submit_pass *submit_pass);
#endif // RENDER_VULKAN_H #endif // RENDER_VULKAN_H

View file

@ -112,7 +112,8 @@ struct wlr_render_rect_pass *get_or_create_render_rect_pass(
struct wlr_render_rect_pass *wlr_pixman_render_rect_pass_create(void); struct wlr_render_rect_pass *wlr_pixman_render_rect_pass_create(void);
struct wlr_render_rect_pass *wlr_gles2_render_rect_pass_create( struct wlr_render_rect_pass *wlr_gles2_render_rect_pass_create(
struct wlr_renderer *wlr_renderer); struct wlr_renderer *wlr_renderer);
struct wlr_render_rect_pass *wlr_vk_render_rect_pass_create(void); struct wlr_render_rect_pass *wlr_vk_render_rect_pass_create(
struct wlr_renderer *wlr_renderer);
struct wlr_render_texture_pass; struct wlr_render_texture_pass;
@ -138,7 +139,8 @@ struct wlr_render_texture_pass *get_or_create_render_texture_pass(
struct wlr_render_texture_pass *wlr_pixman_render_texture_pass_create(void); struct wlr_render_texture_pass *wlr_pixman_render_texture_pass_create(void);
struct wlr_render_texture_pass *wlr_gles2_render_texture_pass_create( struct wlr_render_texture_pass *wlr_gles2_render_texture_pass_create(
struct wlr_renderer *wlr_renderer); struct wlr_renderer *wlr_renderer);
struct wlr_render_texture_pass *wlr_vk_render_texture_pass_create(void); struct wlr_render_texture_pass *wlr_vk_render_texture_pass_create(
struct wlr_renderer *wlr_renderer);
struct wlr_render_submit_pass; struct wlr_render_submit_pass;
@ -161,6 +163,7 @@ struct wlr_render_submit_pass *get_or_create_render_submit_pass(
struct wlr_renderer *renderer); struct wlr_renderer *renderer);
struct wlr_render_submit_pass *wlr_pixman_render_submit_pass_create(void); struct wlr_render_submit_pass *wlr_pixman_render_submit_pass_create(void);
struct wlr_render_submit_pass *wlr_gles2_render_submit_pass_create(void); struct wlr_render_submit_pass *wlr_gles2_render_submit_pass_create(void);
struct wlr_render_submit_pass *wlr_vk_render_submit_pass_create(void); struct wlr_render_submit_pass *wlr_vk_render_submit_pass_create(
struct wlr_renderer *wlr_renderer);
#endif #endif

View file

@ -154,7 +154,7 @@ struct wlr_render_rect_pass *get_or_create_render_rect_pass(
#if WLR_HAS_VULKAN_RENDERER #if WLR_HAS_VULKAN_RENDERER
else if (wlr_renderer_is_vk(renderer)) { else if (wlr_renderer_is_vk(renderer)) {
pass = wlr_vk_render_rect_pass_create(); pass = wlr_vk_render_rect_pass_create(renderer);
} }
#endif #endif
@ -206,7 +206,7 @@ struct wlr_render_texture_pass *get_or_create_render_texture_pass(
#if WLR_HAS_VULKAN_RENDERER #if WLR_HAS_VULKAN_RENDERER
else if (wlr_renderer_is_vk(renderer)) { else if (wlr_renderer_is_vk(renderer)) {
pass = wlr_vk_render_texture_pass_create(); pass = wlr_vk_render_texture_pass_create(renderer);
} }
#endif #endif
@ -259,7 +259,7 @@ struct wlr_render_submit_pass *get_or_create_render_submit_pass(
#if WLR_HAS_VULKAN_RENDERER #if WLR_HAS_VULKAN_RENDERER
else if (wlr_renderer_is_vk(renderer)) { else if (wlr_renderer_is_vk(renderer)) {
pass = wlr_vk_render_submit_pass_create(); pass = wlr_vk_render_submit_pass_create(renderer);
} }
#endif #endif

View file

@ -3,16 +3,35 @@
#include <stdlib.h> #include <stdlib.h>
#include <unistd.h> #include <unistd.h>
#include <wlr/util/log.h> #include <wlr/util/log.h>
#include <wlr/render/color.h>
#include <wlr/render/drm_syncobj.h> #include <wlr/render/drm_syncobj.h>
#include "render/color.h" #include "render/color.h"
#include "render/vulkan.h" #include "render/vulkan.h"
#include "render/vulkan/shaders/quad.frag.h"
#include "render/vulkan/shaders/texture.frag.h"
#include "util/matrix.h" #include "util/matrix.h"
static const struct wlr_render_pass_impl render_pass_impl; static const struct wlr_render_pass_impl render_pass_impl;
static const struct wlr_addon_interface vk_color_transform_impl; static const struct wlr_addon_interface vk_color_transform_impl;
// Compile a SPIR-V blob into a VkShaderModule.
//
// code/code_size describe the SPIR-V words, name is used only for the
// error log. On success *out receives the new module and true is
// returned; on failure the error is logged and false is returned.
static bool create_shader_module(VkDevice dev, const uint32_t *code,
		size_t code_size, const char *name, VkShaderModule *out) {
	const VkShaderModuleCreateInfo create_info = {
		.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
		.codeSize = code_size,
		.pCode = code,
	};

	VkResult res = vkCreateShaderModule(dev, &create_info, NULL, out);
	if (res == VK_SUCCESS) {
		return true;
	}

	wlr_log(WLR_ERROR, "Failed to create %s shader module: %s (%d)",
		name, vulkan_strerror(res), res);
	return false;
}
static struct wlr_vk_render_pass *get_render_pass(struct wlr_render_pass *wlr_pass) { static struct wlr_vk_render_pass *get_render_pass(struct wlr_render_pass *wlr_pass) {
assert(wlr_pass->impl == &render_pass_impl); assert(wlr_pass->impl == &render_pass_impl);
struct wlr_vk_render_pass *pass = wl_container_of(wlr_pass, pass, base); struct wlr_vk_render_pass *pass = wl_container_of(wlr_pass, pass, base);
@ -223,6 +242,9 @@ static bool unwrap_color_transform(struct wlr_color_transform *transform,
static bool render_pass_submit(struct wlr_render_pass *wlr_pass) { static bool render_pass_submit(struct wlr_render_pass *wlr_pass) {
struct wlr_vk_render_pass *pass = get_render_pass(wlr_pass); struct wlr_vk_render_pass *pass = get_render_pass(wlr_pass);
struct wlr_vk_renderer *renderer = pass->renderer; struct wlr_vk_renderer *renderer = pass->renderer;
struct wlr_vk_render_submit_pass *submit_pass =
wlr_vk_render_submit_pass_from_pass(renderer->wlr_renderer.submit_pass);
assert(submit_pass != NULL);
struct wlr_vk_command_buffer *render_cb = pass->command_buffer; struct wlr_vk_command_buffer *render_cb = pass->command_buffer;
struct wlr_vk_render_buffer *render_buffer = pass->render_buffer; struct wlr_vk_render_buffer *render_buffer = pass->render_buffer;
struct wlr_vk_command_buffer *stage_cb = NULL; struct wlr_vk_command_buffer *stage_cb = NULL;
@ -306,9 +328,9 @@ static bool render_pass_submit(struct wlr_render_pass *wlr_pass) {
} }
} }
bind_pipeline(pass, pipeline); bind_pipeline(pass, pipeline);
vkCmdPushConstants(render_cb->vk, renderer->output_pipe_layout, vkCmdPushConstants(render_cb->vk, submit_pass->output.pipe_layout,
VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(vert_pcr_data), &vert_pcr_data); VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(vert_pcr_data), &vert_pcr_data);
vkCmdPushConstants(render_cb->vk, renderer->output_pipe_layout, vkCmdPushConstants(render_cb->vk, submit_pass->output.pipe_layout,
VK_SHADER_STAGE_FRAGMENT_BIT, sizeof(vert_pcr_data), VK_SHADER_STAGE_FRAGMENT_BIT, sizeof(vert_pcr_data),
sizeof(frag_pcr_data), &frag_pcr_data); sizeof(frag_pcr_data), &frag_pcr_data);
@ -316,7 +338,7 @@ static bool render_pass_submit(struct wlr_render_pass *wlr_pass) {
if (need_lut) { if (need_lut) {
lut_ds = transform->lut_3d.ds; lut_ds = transform->lut_3d.ds;
} else { } else {
lut_ds = renderer->output_ds_lut3d_dummy; lut_ds = submit_pass->output.ds_lut3d_dummy;
} }
VkDescriptorSet ds[] = { VkDescriptorSet ds[] = {
render_buffer->two_pass.blend_descriptor_set, // set 0 render_buffer->two_pass.blend_descriptor_set, // set 0
@ -324,7 +346,7 @@ static bool render_pass_submit(struct wlr_render_pass *wlr_pass) {
}; };
size_t ds_len = sizeof(ds) / sizeof(ds[0]); size_t ds_len = sizeof(ds) / sizeof(ds[0]);
vkCmdBindDescriptorSets(render_cb->vk, vkCmdBindDescriptorSets(render_cb->vk,
VK_PIPELINE_BIND_POINT_GRAPHICS, renderer->output_pipe_layout, VK_PIPELINE_BIND_POINT_GRAPHICS, submit_pass->output.pipe_layout,
0, ds_len, ds, 0, NULL); 0, ds_len, ds, 0, NULL);
const pixman_region32_t *clip = rect_union_evaluate(&pass->updated_region); const pixman_region32_t *clip = rect_union_evaluate(&pass->updated_region);
@ -684,6 +706,9 @@ static void render_pass_mark_box_updated(struct wlr_vk_render_pass *pass,
static void render_pass_add_rect(struct wlr_render_pass *wlr_pass, static void render_pass_add_rect(struct wlr_render_pass *wlr_pass,
const struct wlr_render_rect_options *options) { const struct wlr_render_rect_options *options) {
struct wlr_vk_render_pass *pass = get_render_pass(wlr_pass); struct wlr_vk_render_pass *pass = get_render_pass(wlr_pass);
struct wlr_vk_render_rect_pass *rect_pass =
wlr_vk_render_rect_pass_from_pass(pass->renderer->wlr_renderer.rect_pass);
assert(rect_pass != NULL);
VkCommandBuffer cb = pass->command_buffer->vk; VkCommandBuffer cb = pass->command_buffer->vk;
// Input color values are given in sRGB space, shader expects // Input color values are given in sRGB space, shader expects
@ -733,7 +758,9 @@ static void render_pass_add_rect(struct wlr_render_pass *wlr_pass,
&(struct wlr_vk_pipeline_key) { &(struct wlr_vk_pipeline_key) {
.source = WLR_VK_SHADER_SOURCE_SINGLE_COLOR, .source = WLR_VK_SHADER_SOURCE_SINGLE_COLOR,
.layout = {0}, .layout = {0},
}); },
rect_pass->shader.vert_module,
rect_pass->shader.frag_module);
if (!pipe) { if (!pipe) {
pass->failed = true; pass->failed = true;
break; break;
@ -787,6 +814,9 @@ static void render_pass_add_texture(struct wlr_render_pass *wlr_pass,
const struct wlr_render_texture_options *options) { const struct wlr_render_texture_options *options) {
struct wlr_vk_render_pass *pass = get_render_pass(wlr_pass); struct wlr_vk_render_pass *pass = get_render_pass(wlr_pass);
struct wlr_vk_renderer *renderer = pass->renderer; struct wlr_vk_renderer *renderer = pass->renderer;
struct wlr_vk_render_texture_pass *texture_pass =
wlr_vk_render_texture_pass_from_pass(renderer->wlr_renderer.texture_pass);
assert(texture_pass != NULL);
VkCommandBuffer cb = pass->command_buffer->vk; VkCommandBuffer cb = pass->command_buffer->vk;
struct wlr_vk_texture *texture = vulkan_get_texture(options->texture); struct wlr_vk_texture *texture = vulkan_get_texture(options->texture);
@ -884,7 +914,9 @@ static void render_pass_add_texture(struct wlr_render_pass *wlr_pass,
.texture_transform = tex_transform, .texture_transform = tex_transform,
.blend_mode = !texture->has_alpha && alpha == 1.0 ? .blend_mode = !texture->has_alpha && alpha == 1.0 ?
WLR_RENDER_BLEND_MODE_NONE : options->blend_mode, WLR_RENDER_BLEND_MODE_NONE : options->blend_mode,
}); },
texture_pass->shader.vert_module,
texture_pass->shader.frag_module);
if (!pipe) { if (!pipe) {
pass->failed = true; pass->failed = true;
return; return;
@ -1019,6 +1051,12 @@ static bool create_3d_lut_image(struct wlr_vk_renderer *renderer,
VkImage *image, VkImageView *image_view, VkImage *image, VkImageView *image_view,
VkDeviceMemory *memory, VkDescriptorSet *ds, VkDeviceMemory *memory, VkDescriptorSet *ds,
struct wlr_vk_descriptor_pool **ds_pool) { struct wlr_vk_descriptor_pool **ds_pool) {
struct wlr_vk_render_submit_pass *submit_pass =
wlr_vk_render_submit_pass_from_pass(renderer->wlr_renderer.submit_pass);
if (submit_pass == NULL) {
return false;
}
VkDevice dev = renderer->dev->dev; VkDevice dev = renderer->dev->dev;
VkResult res; VkResult res;
@ -1157,7 +1195,7 @@ static bool create_3d_lut_image(struct wlr_vk_renderer *renderer,
VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, VK_ACCESS_SHADER_READ_BIT); VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, VK_ACCESS_SHADER_READ_BIT);
*ds_pool = vulkan_alloc_texture_ds(renderer, *ds_pool = vulkan_alloc_texture_ds(renderer,
renderer->output_ds_lut3d_layout, ds); submit_pass->output.ds_lut3d_layout, ds);
if (!*ds_pool) { if (!*ds_pool) {
wlr_log(WLR_ERROR, "Failed to allocate descriptor"); wlr_log(WLR_ERROR, "Failed to allocate descriptor");
goto fail_imageview; goto fail_imageview;
@ -1326,9 +1364,12 @@ struct wlr_vk_render_pass *vulkan_begin_render_pass(struct wlr_vk_renderer *rend
return NULL; return NULL;
} }
if (!renderer->dummy3d_image_transitioned) { struct wlr_vk_render_submit_pass *submit_pass =
renderer->dummy3d_image_transitioned = true; wlr_vk_render_submit_pass_from_pass(renderer->wlr_renderer.submit_pass);
vulkan_change_layout(cb->vk, renderer->dummy3d_image, assert(submit_pass != NULL);
if (!submit_pass->output.dummy3d_image_transitioned) {
submit_pass->output.dummy3d_image_transitioned = true;
vulkan_change_layout(cb->vk, submit_pass->output.dummy3d_image,
VK_IMAGE_LAYOUT_UNDEFINED, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
0, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, 0, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, VK_ACCESS_SHADER_READ_BIT); VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, VK_ACCESS_SHADER_READ_BIT);
@ -1368,6 +1409,14 @@ struct wlr_vk_render_pass *vulkan_begin_render_pass(struct wlr_vk_renderer *rend
static void render_rect_pass_destroy(struct wlr_render_rect_pass *pass) { static void render_rect_pass_destroy(struct wlr_render_rect_pass *pass) {
struct wlr_vk_render_rect_pass *vk_pass = struct wlr_vk_render_rect_pass *vk_pass =
wlr_vk_render_rect_pass_from_pass(pass); wlr_vk_render_rect_pass_from_pass(pass);
if (vk_pass->shader.vert_module != VK_NULL_HANDLE) {
vkDestroyShaderModule(vk_pass->renderer->dev->dev,
vk_pass->shader.vert_module, NULL);
}
if (vk_pass->shader.frag_module != VK_NULL_HANDLE) {
vkDestroyShaderModule(vk_pass->renderer->dev->dev,
vk_pass->shader.frag_module, NULL);
}
free(vk_pass); free(vk_pass);
} }
@ -1376,15 +1425,30 @@ static const struct wlr_render_rect_pass_impl render_rect_pass_impl = {
.render = render_pass_add_rect, .render = render_pass_add_rect,
}; };
struct wlr_render_rect_pass *wlr_vk_render_rect_pass_create(void) { struct wlr_render_rect_pass *wlr_vk_render_rect_pass_create(
struct wlr_renderer *wlr_renderer) {
struct wlr_vk_render_rect_pass *pass = calloc(1, sizeof(*pass)); struct wlr_vk_render_rect_pass *pass = calloc(1, sizeof(*pass));
if (pass == NULL) { if (pass == NULL) {
wlr_log_errno(WLR_ERROR, "failed to allocate wlr_vk_render_rect_pass"); wlr_log_errno(WLR_ERROR, "failed to allocate wlr_vk_render_rect_pass");
return NULL; return NULL;
} }
wlr_render_rect_pass_init(&pass->base, &render_rect_pass_impl); wlr_render_rect_pass_init(&pass->base, &render_rect_pass_impl);
struct wlr_vk_renderer *renderer = vulkan_get_renderer(wlr_renderer);
pass->renderer = renderer;
pass->shader.vert_module = vulkan_create_common_vert_module(renderer);
if (pass->shader.vert_module == VK_NULL_HANDLE) {
render_rect_pass_destroy(&pass->base);
return NULL;
}
VkDevice dev = renderer->dev->dev;
if (!create_shader_module(dev, quad_frag_data, sizeof(quad_frag_data),
"quad fragment", &pass->shader.frag_module)) {
render_rect_pass_destroy(&pass->base);
return NULL;
}
return &pass->base; return &pass->base;
} }
@ -1407,6 +1471,14 @@ struct wlr_vk_render_rect_pass *wlr_vk_render_rect_pass_from_pass(
static void render_texture_pass_destroy(struct wlr_render_texture_pass *pass) { static void render_texture_pass_destroy(struct wlr_render_texture_pass *pass) {
struct wlr_vk_render_texture_pass *vk_pass = struct wlr_vk_render_texture_pass *vk_pass =
wlr_vk_render_texture_pass_from_pass(pass); wlr_vk_render_texture_pass_from_pass(pass);
if (vk_pass->shader.vert_module != VK_NULL_HANDLE) {
vkDestroyShaderModule(vk_pass->renderer->dev->dev,
vk_pass->shader.vert_module, NULL);
}
if (vk_pass->shader.frag_module != VK_NULL_HANDLE) {
vkDestroyShaderModule(vk_pass->renderer->dev->dev,
vk_pass->shader.frag_module, NULL);
}
free(vk_pass); free(vk_pass);
} }
@ -1415,15 +1487,30 @@ static const struct wlr_render_texture_pass_impl render_texture_pass_impl = {
.render = render_pass_add_texture, .render = render_pass_add_texture,
}; };
struct wlr_render_texture_pass *wlr_vk_render_texture_pass_create(void) { struct wlr_render_texture_pass *wlr_vk_render_texture_pass_create(
struct wlr_renderer *wlr_renderer) {
struct wlr_vk_render_texture_pass *pass = calloc(1, sizeof(*pass)); struct wlr_vk_render_texture_pass *pass = calloc(1, sizeof(*pass));
if (pass == NULL) { if (pass == NULL) {
wlr_log_errno(WLR_ERROR, "failed to allocate wlr_vk_render_texture_pass"); wlr_log_errno(WLR_ERROR, "failed to allocate wlr_vk_render_texture_pass");
return NULL; return NULL;
} }
wlr_render_texture_pass_init(&pass->base, &render_texture_pass_impl); wlr_render_texture_pass_init(&pass->base, &render_texture_pass_impl);
struct wlr_vk_renderer *renderer = vulkan_get_renderer(wlr_renderer);
pass->renderer = renderer;
pass->shader.vert_module = vulkan_create_common_vert_module(renderer);
if (pass->shader.vert_module == VK_NULL_HANDLE) {
render_texture_pass_destroy(&pass->base);
return NULL;
}
VkDevice dev = renderer->dev->dev;
if (!create_shader_module(dev, texture_frag_data, sizeof(texture_frag_data),
"texture fragment", &pass->shader.frag_module)) {
render_texture_pass_destroy(&pass->base);
return NULL;
}
return &pass->base; return &pass->base;
} }
bool wlr_render_texture_pass_is_vk(const struct wlr_render_texture_pass *texture_pass) { bool wlr_render_texture_pass_is_vk(const struct wlr_render_texture_pass *texture_pass) {
@ -1444,6 +1531,24 @@ struct wlr_vk_render_texture_pass *wlr_vk_render_texture_pass_from_pass(
static void render_submit_pass_destroy(struct wlr_render_submit_pass *pass) { static void render_submit_pass_destroy(struct wlr_render_submit_pass *pass) {
struct wlr_vk_render_submit_pass *vk_pass = struct wlr_vk_render_submit_pass *vk_pass =
wlr_vk_render_submit_pass_from_pass(pass); wlr_vk_render_submit_pass_from_pass(pass);
VkDevice dev = vk_pass->renderer->dev->dev;
struct wlr_vk_descriptor_pool *pool, *tmp_pool;
wl_list_for_each_safe(pool, tmp_pool, &vk_pass->output.descriptor_pools, link) {
vkDestroyDescriptorPool(dev, pool->pool, NULL);
free(pool);
}
vkDestroyShaderModule(dev, vk_pass->output.vert_module, NULL);
vkDestroyShaderModule(dev, vk_pass->output.frag_module, NULL);
vkDestroyImageView(dev, vk_pass->output.dummy3d_image_view, NULL);
vkDestroyImage(dev, vk_pass->output.dummy3d_image, NULL);
vkFreeMemory(dev, vk_pass->output.dummy3d_mem, NULL);
vkDestroyPipelineLayout(dev, vk_pass->output.pipe_layout, NULL);
vkDestroyDescriptorSetLayout(dev, vk_pass->output.ds_srgb_layout, NULL);
vkDestroyDescriptorSetLayout(dev, vk_pass->output.ds_lut3d_layout, NULL);
vkDestroySampler(dev, vk_pass->output.sampler_lut3d, NULL);
free(vk_pass); free(vk_pass);
} }
@ -1452,7 +1557,8 @@ static const struct wlr_render_submit_pass_impl vk_render_submit_pass_impl = {
.render = render_pass_submit, .render = render_pass_submit,
}; };
struct wlr_render_submit_pass *wlr_vk_render_submit_pass_create(void) { struct wlr_render_submit_pass *wlr_vk_render_submit_pass_create(
struct wlr_renderer *wlr_renderer) {
struct wlr_vk_render_submit_pass *pass = calloc(1, sizeof(*pass)); struct wlr_vk_render_submit_pass *pass = calloc(1, sizeof(*pass));
if (pass == NULL) { if (pass == NULL) {
wlr_log_errno(WLR_ERROR, "failed to allocate wlr_vk_render_submit_pass"); wlr_log_errno(WLR_ERROR, "failed to allocate wlr_vk_render_submit_pass");
@ -1460,6 +1566,13 @@ struct wlr_render_submit_pass *wlr_vk_render_submit_pass_create(void) {
} }
wlr_render_submit_pass_init(&pass->base, &vk_render_submit_pass_impl); wlr_render_submit_pass_init(&pass->base, &vk_render_submit_pass_impl);
pass->renderer = vulkan_get_renderer(wlr_renderer);
wl_list_init(&pass->output.descriptor_pools);
if (!vulkan_init_submit_pass_output(pass->renderer, pass)) {
render_submit_pass_destroy(&pass->base);
return NULL;
}
return &pass->base; return &pass->base;
} }

View file

@ -23,8 +23,6 @@
#include "render/pixel_format.h" #include "render/pixel_format.h"
#include "render/vulkan.h" #include "render/vulkan.h"
#include "render/vulkan/shaders/common.vert.h" #include "render/vulkan/shaders/common.vert.h"
#include "render/vulkan/shaders/texture.frag.h"
#include "render/vulkan/shaders/quad.frag.h"
#include "render/vulkan/shaders/output.frag.h" #include "render/vulkan/shaders/output.frag.h"
#include "types/wlr_buffer.h" #include "types/wlr_buffer.h"
#include "util/time.h" #include "util/time.h"
@ -56,6 +54,23 @@ struct wlr_vk_renderer *vulkan_get_renderer(struct wlr_renderer *wlr_renderer) {
return renderer; return renderer;
} }
// Build the vertex shader module shared by the render passes from the
// embedded common.vert SPIR-V. Returns VK_NULL_HANDLE on failure (the
// error is logged).
VkShaderModule vulkan_create_common_vert_module(struct wlr_vk_renderer *renderer) {
	VkShaderModule module = VK_NULL_HANDLE;
	const VkShaderModuleCreateInfo create_info = {
		.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
		.codeSize = sizeof(common_vert_data),
		.pCode = common_vert_data,
	};

	VkResult res = vkCreateShaderModule(renderer->dev->dev,
		&create_info, NULL, &module);
	if (res != VK_SUCCESS) {
		wlr_vk_error("Failed to create vertex shader module", res);
		// Make the failure unambiguous to callers regardless of what
		// the driver left in `module`.
		module = VK_NULL_HANDLE;
	}
	return module;
}
static struct wlr_vk_render_format_setup *find_or_create_render_setup( static struct wlr_vk_render_format_setup *find_or_create_render_setup(
struct wlr_vk_renderer *renderer, const struct wlr_vk_format *format, struct wlr_vk_renderer *renderer, const struct wlr_vk_format *format,
bool has_blending_buffer, bool srgb); bool has_blending_buffer, bool srgb);
@ -152,9 +167,14 @@ struct wlr_vk_descriptor_pool *vulkan_alloc_texture_ds(
struct wlr_vk_descriptor_pool *vulkan_alloc_blend_ds( struct wlr_vk_descriptor_pool *vulkan_alloc_blend_ds(
struct wlr_vk_renderer *renderer, VkDescriptorSet *ds) { struct wlr_vk_renderer *renderer, VkDescriptorSet *ds) {
struct wlr_vk_render_submit_pass *submit_pass =
wlr_vk_render_submit_pass_from_pass(renderer->wlr_renderer.submit_pass);
assert(submit_pass != NULL);
return alloc_ds(renderer, ds, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, return alloc_ds(renderer, ds, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT,
&renderer->output_ds_srgb_layout, &renderer->output_descriptor_pools, &submit_pass->output.ds_srgb_layout,
&renderer->last_output_pool_size); &submit_pass->output.descriptor_pools,
&submit_pass->output.last_pool_size);
} }
void vulkan_free_ds(struct wlr_vk_renderer *renderer, void vulkan_free_ds(struct wlr_vk_renderer *renderer,
@ -1172,15 +1192,6 @@ static void vulkan_destroy(struct wlr_renderer *wlr_renderer) {
vkDestroyDescriptorPool(dev->dev, pool->pool, NULL); vkDestroyDescriptorPool(dev->dev, pool->pool, NULL);
free(pool); free(pool);
} }
wl_list_for_each_safe(pool, tmp_pool, &renderer->output_descriptor_pools, link) {
vkDestroyDescriptorPool(dev->dev, pool->pool, NULL);
free(pool);
}
vkDestroyShaderModule(dev->dev, renderer->vert_module, NULL);
vkDestroyShaderModule(dev->dev, renderer->tex_frag_module, NULL);
vkDestroyShaderModule(dev->dev, renderer->quad_frag_module, NULL);
vkDestroyShaderModule(dev->dev, renderer->output_module, NULL);
struct wlr_vk_pipeline_layout *pipeline_layout, *pipeline_layout_tmp; struct wlr_vk_pipeline_layout *pipeline_layout, *pipeline_layout_tmp;
wl_list_for_each_safe(pipeline_layout, pipeline_layout_tmp, wl_list_for_each_safe(pipeline_layout, pipeline_layout_tmp,
@ -1192,16 +1203,8 @@ static void vulkan_destroy(struct wlr_renderer *wlr_renderer) {
free(pipeline_layout); free(pipeline_layout);
} }
vkDestroyImageView(dev->dev, renderer->dummy3d_image_view, NULL);
vkDestroyImage(dev->dev, renderer->dummy3d_image, NULL);
vkFreeMemory(dev->dev, renderer->dummy3d_mem, NULL);
vkDestroySemaphore(dev->dev, renderer->timeline_semaphore, NULL); vkDestroySemaphore(dev->dev, renderer->timeline_semaphore, NULL);
vkDestroyPipelineLayout(dev->dev, renderer->output_pipe_layout, NULL);
vkDestroyDescriptorSetLayout(dev->dev, renderer->output_ds_srgb_layout, NULL);
vkDestroyDescriptorSetLayout(dev->dev, renderer->output_ds_lut3d_layout, NULL);
vkDestroyCommandPool(dev->dev, renderer->command_pool, NULL); vkDestroyCommandPool(dev->dev, renderer->command_pool, NULL);
vkDestroySampler(dev->dev, renderer->output_sampler_lut3d, NULL);
if (renderer->read_pixels_cache.initialized) { if (renderer->read_pixels_cache.initialized) {
vkFreeMemory(dev->dev, renderer->read_pixels_cache.dst_img_memory, NULL); vkFreeMemory(dev->dev, renderer->read_pixels_cache.dst_img_memory, NULL);
@ -1551,7 +1554,8 @@ static bool init_tex_layouts(struct wlr_vk_renderer *renderer,
return true; return true;
} }
static bool init_blend_to_output_layouts(struct wlr_vk_renderer *renderer) { static bool init_blend_to_output_layouts(struct wlr_vk_renderer *renderer,
struct wlr_vk_render_submit_pass *submit_pass) {
VkResult res; VkResult res;
VkDevice dev = renderer->dev->dev; VkDevice dev = renderer->dev->dev;
@ -1569,7 +1573,8 @@ static bool init_blend_to_output_layouts(struct wlr_vk_renderer *renderer) {
.pBindings = &ds_binding_input, .pBindings = &ds_binding_input,
}; };
res = vkCreateDescriptorSetLayout(dev, &ds_info, NULL, &renderer->output_ds_srgb_layout); res = vkCreateDescriptorSetLayout(dev, &ds_info, NULL,
&submit_pass->output.ds_srgb_layout);
if (res != VK_SUCCESS) { if (res != VK_SUCCESS) {
wlr_vk_error("vkCreateDescriptorSetLayout", res); wlr_vk_error("vkCreateDescriptorSetLayout", res);
return false; return false;
@ -1588,7 +1593,7 @@ static bool init_blend_to_output_layouts(struct wlr_vk_renderer *renderer) {
}; };
res = vkCreateSampler(renderer->dev->dev, &sampler_create_info, NULL, res = vkCreateSampler(renderer->dev->dev, &sampler_create_info, NULL,
&renderer->output_sampler_lut3d); &submit_pass->output.sampler_lut3d);
if (res != VK_SUCCESS) { if (res != VK_SUCCESS) {
wlr_vk_error("vkCreateSampler", res); wlr_vk_error("vkCreateSampler", res);
return false; return false;
@ -1599,7 +1604,7 @@ static bool init_blend_to_output_layouts(struct wlr_vk_renderer *renderer) {
.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
.descriptorCount = 1, .descriptorCount = 1,
.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT, .stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT,
.pImmutableSamplers = &renderer->output_sampler_lut3d, .pImmutableSamplers = &submit_pass->output.sampler_lut3d,
}; };
VkDescriptorSetLayoutCreateInfo ds_lut3d_info = { VkDescriptorSetLayoutCreateInfo ds_lut3d_info = {
@ -1609,7 +1614,7 @@ static bool init_blend_to_output_layouts(struct wlr_vk_renderer *renderer) {
}; };
res = vkCreateDescriptorSetLayout(dev, &ds_lut3d_info, NULL, res = vkCreateDescriptorSetLayout(dev, &ds_lut3d_info, NULL,
&renderer->output_ds_lut3d_layout); &submit_pass->output.ds_lut3d_layout);
if (res != VK_SUCCESS) { if (res != VK_SUCCESS) {
wlr_vk_error("vkCreateDescriptorSetLayout", res); wlr_vk_error("vkCreateDescriptorSetLayout", res);
return false; return false;
@ -1630,8 +1635,8 @@ static bool init_blend_to_output_layouts(struct wlr_vk_renderer *renderer) {
}; };
VkDescriptorSetLayout out_ds_layouts[] = { VkDescriptorSetLayout out_ds_layouts[] = {
renderer->output_ds_srgb_layout, submit_pass->output.ds_srgb_layout,
renderer->output_ds_lut3d_layout, submit_pass->output.ds_lut3d_layout,
}; };
VkPipelineLayoutCreateInfo pl_info = { VkPipelineLayoutCreateInfo pl_info = {
@ -1642,7 +1647,8 @@ static bool init_blend_to_output_layouts(struct wlr_vk_renderer *renderer) {
.pPushConstantRanges = pc_ranges, .pPushConstantRanges = pc_ranges,
}; };
res = vkCreatePipelineLayout(dev, &pl_info, NULL, &renderer->output_pipe_layout); res = vkCreatePipelineLayout(dev, &pl_info, NULL,
&submit_pass->output.pipe_layout);
if (res != VK_SUCCESS) { if (res != VK_SUCCESS) {
wlr_vk_error("vkCreatePipelineLayout", res); wlr_vk_error("vkCreatePipelineLayout", res);
return false; return false;
@ -1696,7 +1702,8 @@ static bool pipeline_key_equals(const struct wlr_vk_pipeline_key *a,
// VkRenderPass and VkPipelineLayout. // VkRenderPass and VkPipelineLayout.
struct wlr_vk_pipeline *setup_get_or_create_pipeline( struct wlr_vk_pipeline *setup_get_or_create_pipeline(
struct wlr_vk_render_format_setup *setup, struct wlr_vk_render_format_setup *setup,
const struct wlr_vk_pipeline_key *key) { const struct wlr_vk_pipeline_key *key,
VkShaderModule vert_module, VkShaderModule frag_module) {
struct wlr_vk_pipeline *pipeline; struct wlr_vk_pipeline *pipeline;
wl_list_for_each(pipeline, &setup->pipelines, link) { wl_list_for_each(pipeline, &setup->pipelines, link) {
if (pipeline_key_equals(&pipeline->key, key)) { if (pipeline_key_equals(&pipeline->key, key)) {
@ -1743,7 +1750,7 @@ struct wlr_vk_pipeline *setup_get_or_create_pipeline(
stages[0] = (VkPipelineShaderStageCreateInfo) { stages[0] = (VkPipelineShaderStageCreateInfo) {
.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
.stage = VK_SHADER_STAGE_VERTEX_BIT, .stage = VK_SHADER_STAGE_VERTEX_BIT,
.module = renderer->vert_module, .module = vert_module,
.pName = "main", .pName = "main",
}; };
@ -1752,7 +1759,7 @@ struct wlr_vk_pipeline *setup_get_or_create_pipeline(
stages[1] = (VkPipelineShaderStageCreateInfo) { stages[1] = (VkPipelineShaderStageCreateInfo) {
.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
.stage = VK_SHADER_STAGE_FRAGMENT_BIT, .stage = VK_SHADER_STAGE_FRAGMENT_BIT,
.module = renderer->quad_frag_module, .module = frag_module,
.pName = "main", .pName = "main",
}; };
break; break;
@ -1760,7 +1767,7 @@ struct wlr_vk_pipeline *setup_get_or_create_pipeline(
stages[1] = (VkPipelineShaderStageCreateInfo) { stages[1] = (VkPipelineShaderStageCreateInfo) {
.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
.stage = VK_SHADER_STAGE_FRAGMENT_BIT, .stage = VK_SHADER_STAGE_FRAGMENT_BIT,
.module = renderer->tex_frag_module, .module = frag_module,
.pName = "main", .pName = "main",
.pSpecializationInfo = &specialization, .pSpecializationInfo = &specialization,
}; };
@ -1857,6 +1864,7 @@ struct wlr_vk_pipeline *setup_get_or_create_pipeline(
} }
static bool init_blend_to_output_pipeline(struct wlr_vk_renderer *renderer, static bool init_blend_to_output_pipeline(struct wlr_vk_renderer *renderer,
struct wlr_vk_render_submit_pass *submit_pass,
VkRenderPass rp, VkPipelineLayout pipe_layout, VkPipeline *pipe, VkRenderPass rp, VkPipelineLayout pipe_layout, VkPipeline *pipe,
enum wlr_vk_output_transform transform) { enum wlr_vk_output_transform transform) {
VkResult res; VkResult res;
@ -1879,13 +1887,13 @@ static bool init_blend_to_output_pipeline(struct wlr_vk_renderer *renderer,
{ {
.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
.stage = VK_SHADER_STAGE_VERTEX_BIT, .stage = VK_SHADER_STAGE_VERTEX_BIT,
.module = renderer->vert_module, .module = submit_pass->output.vert_module,
.pName = "main", .pName = "main",
}, },
{ {
.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
.stage = VK_SHADER_STAGE_FRAGMENT_BIT, .stage = VK_SHADER_STAGE_FRAGMENT_BIT,
.module = renderer->output_module, .module = submit_pass->output.frag_module,
.pName = "main", .pName = "main",
.pSpecializationInfo = &specialization, .pSpecializationInfo = &specialization,
}, },
@ -2089,7 +2097,8 @@ struct wlr_vk_pipeline_layout *get_or_create_pipeline_layout(
* the sampler, a valid descriptor set should be bound. Create that here, linked to * the sampler, a valid descriptor set should be bound. Create that here, linked to
* a 1x1x1 image. * a 1x1x1 image.
*/ */
static bool init_dummy_images(struct wlr_vk_renderer *renderer) { static bool init_dummy_images(struct wlr_vk_renderer *renderer,
struct wlr_vk_render_submit_pass *submit_pass) {
VkResult res; VkResult res;
VkDevice dev = renderer->dev->dev; VkDevice dev = renderer->dev->dev;
@ -2108,14 +2117,14 @@ static bool init_dummy_images(struct wlr_vk_renderer *renderer) {
.tiling = VK_IMAGE_TILING_OPTIMAL, .tiling = VK_IMAGE_TILING_OPTIMAL,
.usage = VK_IMAGE_USAGE_SAMPLED_BIT, .usage = VK_IMAGE_USAGE_SAMPLED_BIT,
}; };
res = vkCreateImage(dev, &img_info, NULL, &renderer->dummy3d_image); res = vkCreateImage(dev, &img_info, NULL, &submit_pass->output.dummy3d_image);
if (res != VK_SUCCESS) { if (res != VK_SUCCESS) {
wlr_vk_error("vkCreateImage failed", res); wlr_vk_error("vkCreateImage failed", res);
return false; return false;
} }
VkMemoryRequirements mem_reqs = {0}; VkMemoryRequirements mem_reqs = {0};
vkGetImageMemoryRequirements(dev, renderer->dummy3d_image, &mem_reqs); vkGetImageMemoryRequirements(dev, submit_pass->output.dummy3d_image, &mem_reqs);
int mem_type_index = vulkan_find_mem_type(renderer->dev, int mem_type_index = vulkan_find_mem_type(renderer->dev,
VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, mem_reqs.memoryTypeBits); VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, mem_reqs.memoryTypeBits);
if (mem_type_index == -1) { if (mem_type_index == -1) {
@ -2127,12 +2136,13 @@ static bool init_dummy_images(struct wlr_vk_renderer *renderer) {
.allocationSize = mem_reqs.size, .allocationSize = mem_reqs.size,
.memoryTypeIndex = mem_type_index, .memoryTypeIndex = mem_type_index,
}; };
res = vkAllocateMemory(dev, &mem_info, NULL, &renderer->dummy3d_mem); res = vkAllocateMemory(dev, &mem_info, NULL, &submit_pass->output.dummy3d_mem);
if (res != VK_SUCCESS) { if (res != VK_SUCCESS) {
wlr_vk_error("vkAllocateMemory failed", res); wlr_vk_error("vkAllocateMemory failed", res);
return false; return false;
} }
res = vkBindImageMemory(dev, renderer->dummy3d_image, renderer->dummy3d_mem, 0); res = vkBindImageMemory(dev, submit_pass->output.dummy3d_image,
submit_pass->output.dummy3d_mem, 0);
if (res != VK_SUCCESS) { if (res != VK_SUCCESS) {
wlr_vk_error("vkBindMemory failed", res); wlr_vk_error("vkBindMemory failed", res);
return false; return false;
@ -2153,29 +2163,31 @@ static bool init_dummy_images(struct wlr_vk_renderer *renderer) {
.baseArrayLayer = 0, .baseArrayLayer = 0,
.layerCount = 1, .layerCount = 1,
}, },
.image = renderer->dummy3d_image, .image = submit_pass->output.dummy3d_image,
}; };
res = vkCreateImageView(dev, &view_info, NULL, &renderer->dummy3d_image_view); res = vkCreateImageView(dev, &view_info, NULL,
&submit_pass->output.dummy3d_image_view);
if (res != VK_SUCCESS) { if (res != VK_SUCCESS) {
wlr_vk_error("vkCreateImageView failed", res); wlr_vk_error("vkCreateImageView failed", res);
return false; return false;
} }
renderer->output_ds_lut3d_dummy_pool = vulkan_alloc_texture_ds(renderer, submit_pass->output.ds_lut3d_dummy_pool = vulkan_alloc_texture_ds(renderer,
renderer->output_ds_lut3d_layout, &renderer->output_ds_lut3d_dummy); submit_pass->output.ds_lut3d_layout,
if (!renderer->output_ds_lut3d_dummy_pool) { &submit_pass->output.ds_lut3d_dummy);
if (!submit_pass->output.ds_lut3d_dummy_pool) {
wlr_log(WLR_ERROR, "Failed to allocate descriptor"); wlr_log(WLR_ERROR, "Failed to allocate descriptor");
return false; return false;
} }
VkDescriptorImageInfo ds_img_info = { VkDescriptorImageInfo ds_img_info = {
.imageView = renderer->dummy3d_image_view, .imageView = submit_pass->output.dummy3d_image_view,
.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, .imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
}; };
VkWriteDescriptorSet ds_write = { VkWriteDescriptorSet ds_write = {
.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
.descriptorCount = 1, .descriptorCount = 1,
.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
.dstSet = renderer->output_ds_lut3d_dummy, .dstSet = submit_pass->output.ds_lut3d_dummy,
.pImageInfo = &ds_img_info, .pImageInfo = &ds_img_info,
}; };
vkUpdateDescriptorSets(dev, 1, &ds_write, 0, NULL); vkUpdateDescriptorSets(dev, 1, &ds_write, 0, NULL);
@ -2183,53 +2195,30 @@ static bool init_dummy_images(struct wlr_vk_renderer *renderer) {
return true; return true;
} }
// Creates static render data, such as sampler, layouts and shader modules // Creates static blend->output render data for the given submit pass.
// for the given renderer. // Cleanup is done by destroying the submit pass.
// Cleanup is done by destroying the renderer. bool vulkan_init_submit_pass_output(struct wlr_vk_renderer *renderer,
static bool init_static_render_data(struct wlr_vk_renderer *renderer) { struct wlr_vk_render_submit_pass *submit_pass) {
VkResult res; VkResult res;
VkDevice dev = renderer->dev->dev; VkDevice dev = renderer->dev->dev;
if (!init_blend_to_output_layouts(renderer)) { if (!init_blend_to_output_layouts(renderer, submit_pass)) {
return false; return false;
} }
if (!init_dummy_images(renderer)) { if (!init_dummy_images(renderer, submit_pass)) {
return false; return false;
} }
// load vert module and tex frag module since they are needed to
// initialize the tex pipeline
VkShaderModuleCreateInfo sinfo = { VkShaderModuleCreateInfo sinfo = {
.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO, .sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
.codeSize = sizeof(common_vert_data), .codeSize = sizeof(common_vert_data),
.pCode = common_vert_data, .pCode = common_vert_data,
}; };
res = vkCreateShaderModule(dev, &sinfo, NULL, &renderer->vert_module); res = vkCreateShaderModule(dev, &sinfo, NULL, &submit_pass->output.vert_module);
if (res != VK_SUCCESS) { if (res != VK_SUCCESS) {
wlr_vk_error("Failed to create vertex shader module", res); wlr_vk_error("Failed to create blend->output vertex shader module", res);
return false;
}
sinfo = (VkShaderModuleCreateInfo){
.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
.codeSize = sizeof(texture_frag_data),
.pCode = texture_frag_data,
};
res = vkCreateShaderModule(dev, &sinfo, NULL, &renderer->tex_frag_module);
if (res != VK_SUCCESS) {
wlr_vk_error("Failed to create tex fragment shader module", res);
return false;
}
sinfo = (VkShaderModuleCreateInfo){
.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
.codeSize = sizeof(quad_frag_data),
.pCode = quad_frag_data,
};
res = vkCreateShaderModule(dev, &sinfo, NULL, &renderer->quad_frag_module);
if (res != VK_SUCCESS) {
wlr_vk_error("Failed to create quad fragment shader module", res);
return false; return false;
} }
@ -2238,7 +2227,7 @@ static bool init_static_render_data(struct wlr_vk_renderer *renderer) {
.codeSize = sizeof(output_frag_data), .codeSize = sizeof(output_frag_data),
.pCode = output_frag_data, .pCode = output_frag_data,
}; };
res = vkCreateShaderModule(dev, &sinfo, NULL, &renderer->output_module); res = vkCreateShaderModule(dev, &sinfo, NULL, &submit_pass->output.frag_module);
if (res != VK_SUCCESS) { if (res != VK_SUCCESS) {
wlr_vk_error("Failed to create blend->output fragment shader module", res); wlr_vk_error("Failed to create blend->output fragment shader module", res);
return false; return false;
@ -2275,6 +2264,12 @@ static struct wlr_vk_render_format_setup *find_or_create_render_setup(
VkResult res; VkResult res;
if (use_blending_buffer) { if (use_blending_buffer) {
struct wlr_vk_render_submit_pass *submit_pass =
wlr_vk_render_submit_pass_from_pass(renderer->wlr_renderer.submit_pass);;
if (submit_pass == NULL) {
goto error;
}
VkAttachmentDescription attachments[] = { VkAttachmentDescription attachments[] = {
{ {
.format = VK_FORMAT_R16G16B16A16_SFLOAT, .format = VK_FORMAT_R16G16B16A16_SFLOAT,
@ -2386,32 +2381,38 @@ static struct wlr_vk_render_format_setup *find_or_create_render_setup(
// this is only well defined if render pass has a 2nd subpass // this is only well defined if render pass has a 2nd subpass
if (!init_blend_to_output_pipeline( if (!init_blend_to_output_pipeline(
renderer, setup->render_pass, renderer->output_pipe_layout, renderer, submit_pass, setup->render_pass,
submit_pass->output.pipe_layout,
&setup->output_pipe_identity, WLR_VK_OUTPUT_TRANSFORM_IDENTITY)) { &setup->output_pipe_identity, WLR_VK_OUTPUT_TRANSFORM_IDENTITY)) {
goto error; goto error;
} }
if (!init_blend_to_output_pipeline( if (!init_blend_to_output_pipeline(
renderer, setup->render_pass, renderer->output_pipe_layout, renderer, submit_pass, setup->render_pass,
submit_pass->output.pipe_layout,
&setup->output_pipe_lut3d, WLR_VK_OUTPUT_TRANSFORM_LUT3D)) { &setup->output_pipe_lut3d, WLR_VK_OUTPUT_TRANSFORM_LUT3D)) {
goto error; goto error;
} }
if (!init_blend_to_output_pipeline( if (!init_blend_to_output_pipeline(
renderer, setup->render_pass, renderer->output_pipe_layout, renderer, submit_pass, setup->render_pass,
submit_pass->output.pipe_layout,
&setup->output_pipe_srgb, WLR_VK_OUTPUT_TRANSFORM_INVERSE_SRGB)) { &setup->output_pipe_srgb, WLR_VK_OUTPUT_TRANSFORM_INVERSE_SRGB)) {
goto error; goto error;
} }
if (!init_blend_to_output_pipeline( if (!init_blend_to_output_pipeline(
renderer, setup->render_pass, renderer->output_pipe_layout, renderer, submit_pass, setup->render_pass,
submit_pass->output.pipe_layout,
&setup->output_pipe_pq, WLR_VK_OUTPUT_TRANSFORM_INVERSE_ST2084_PQ)) { &setup->output_pipe_pq, WLR_VK_OUTPUT_TRANSFORM_INVERSE_ST2084_PQ)) {
goto error; goto error;
} }
if (!init_blend_to_output_pipeline( if (!init_blend_to_output_pipeline(
renderer, setup->render_pass, renderer->output_pipe_layout, renderer, submit_pass, setup->render_pass,
submit_pass->output.pipe_layout,
&setup->output_pipe_gamma22, WLR_VK_OUTPUT_TRANSFORM_INVERSE_GAMMA22)) { &setup->output_pipe_gamma22, WLR_VK_OUTPUT_TRANSFORM_INVERSE_GAMMA22)) {
goto error; goto error;
} }
if (!init_blend_to_output_pipeline( if (!init_blend_to_output_pipeline(
renderer, setup->render_pass, renderer->output_pipe_layout, renderer, submit_pass, setup->render_pass,
submit_pass->output.pipe_layout,
&setup->output_pipe_bt1886, WLR_VK_OUTPUT_TRANSFORM_INVERSE_BT1886)) { &setup->output_pipe_bt1886, WLR_VK_OUTPUT_TRANSFORM_INVERSE_BT1886)) {
goto error; goto error;
} }
@ -2488,29 +2489,6 @@ static struct wlr_vk_render_format_setup *find_or_create_render_setup(
} }
} }
if (!setup_get_or_create_pipeline(setup, &(struct wlr_vk_pipeline_key){
.source = WLR_VK_SHADER_SOURCE_SINGLE_COLOR,
.layout = {0},
})) {
goto error;
}
if (!setup_get_or_create_pipeline(setup, &(struct wlr_vk_pipeline_key){
.source = WLR_VK_SHADER_SOURCE_TEXTURE,
.texture_transform = WLR_VK_TEXTURE_TRANSFORM_IDENTITY,
.layout = {0},
})) {
goto error;
}
if (!setup_get_or_create_pipeline(setup, &(struct wlr_vk_pipeline_key){
.source = WLR_VK_SHADER_SOURCE_TEXTURE,
.texture_transform = WLR_VK_TEXTURE_TRANSFORM_SRGB,
.layout = {0},
})) {
goto error;
}
wl_list_insert(&renderer->render_format_setups, &setup->link); wl_list_insert(&renderer->render_format_setups, &setup->link);
return setup; return setup;
@ -2535,7 +2513,6 @@ struct wlr_renderer *vulkan_renderer_create_for_device(struct wlr_vk_device *dev
wl_list_init(&renderer->foreign_textures); wl_list_init(&renderer->foreign_textures);
wl_list_init(&renderer->textures); wl_list_init(&renderer->textures);
wl_list_init(&renderer->descriptor_pools); wl_list_init(&renderer->descriptor_pools);
wl_list_init(&renderer->output_descriptor_pools);
wl_list_init(&renderer->render_format_setups); wl_list_init(&renderer->render_format_setups);
wl_list_init(&renderer->render_buffers); wl_list_init(&renderer->render_buffers);
wl_list_init(&renderer->color_transforms); wl_list_init(&renderer->color_transforms);
@ -2551,10 +2528,6 @@ struct wlr_renderer *vulkan_renderer_create_for_device(struct wlr_vk_device *dev
renderer->wlr_renderer.features.timeline = dev->sync_file_import_export && cap_syncobj_timeline != 0; renderer->wlr_renderer.features.timeline = dev->sync_file_import_export && cap_syncobj_timeline != 0;
} }
if (!init_static_render_data(renderer)) {
goto error;
}
VkCommandPoolCreateInfo cpool_info = { VkCommandPoolCreateInfo cpool_info = {
.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, .sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, .flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,