Split raster barrier into vertex and fragment barrier #77420

Merged
38 changes: 22 additions & 16 deletions doc/classes/RenderingDevice.xml
@@ -16,8 +16,8 @@
<methods>
<method name="barrier">
<return type="void" />
<param index="0" name="from" type="int" enum="RenderingDevice.BarrierMask" is_bitfield="true" default="7" />
<param index="1" name="to" type="int" enum="RenderingDevice.BarrierMask" is_bitfield="true" default="7" />
<param index="0" name="from" type="int" enum="RenderingDevice.BarrierMask" is_bitfield="true" default="15" />
<param index="1" name="to" type="int" enum="RenderingDevice.BarrierMask" is_bitfield="true" default="15" />
<description>
Puts a memory barrier in place. This is used for synchronization to avoid data races. See also [method full_barrier], which may be useful for debugging.
</description>
@@ -27,7 +27,7 @@
<param index="0" name="buffer" type="RID" />
<param index="1" name="offset" type="int" />
<param index="2" name="size_bytes" type="int" />
<param index="3" name="post_barrier" type="int" enum="RenderingDevice.BarrierMask" is_bitfield="true" default="7" />
<param index="3" name="post_barrier" type="int" enum="RenderingDevice.BarrierMask" is_bitfield="true" default="15" />
<description>
</description>
</method>
@@ -46,7 +46,7 @@
<param index="1" name="offset" type="int" />
<param index="2" name="size_bytes" type="int" />
<param index="3" name="data" type="PackedByteArray" />
<param index="4" name="post_barrier" type="int" enum="RenderingDevice.BarrierMask" is_bitfield="true" default="7" />
<param index="4" name="post_barrier" type="int" enum="RenderingDevice.BarrierMask" is_bitfield="true" default="15" />
<description>
</description>
</method>
@@ -114,7 +114,7 @@
</method>
<method name="compute_list_end">
<return type="void" />
<param index="0" name="post_barrier" type="int" enum="RenderingDevice.BarrierMask" is_bitfield="true" default="7" />
<param index="0" name="post_barrier" type="int" enum="RenderingDevice.BarrierMask" is_bitfield="true" default="15" />
<description>
Finishes a list of compute commands created with the [code]compute_*[/code] methods.
</description>
@@ -296,7 +296,7 @@
</method>
<method name="draw_list_end">
<return type="void" />
<param index="0" name="post_barrier" type="int" enum="RenderingDevice.BarrierMask" is_bitfield="true" default="7" />
<param index="0" name="post_barrier" type="int" enum="RenderingDevice.BarrierMask" is_bitfield="true" default="15" />
<description>
Finishes a list of raster drawing commands created with the [code]draw_*[/code] methods.
</description>
@@ -682,7 +682,7 @@
<param index="3" name="mipmap_count" type="int" />
<param index="4" name="base_layer" type="int" />
<param index="5" name="layer_count" type="int" />
<param index="6" name="post_barrier" type="int" enum="RenderingDevice.BarrierMask" is_bitfield="true" default="7" />
<param index="6" name="post_barrier" type="int" enum="RenderingDevice.BarrierMask" is_bitfield="true" default="15" />
<description>
Clears the specified [param texture] by replacing all of its pixels with the specified [param color]. [param base_mipmap] and [param mipmap_count] determine which mipmaps of the texture are affected by this clear operation, while [param base_layer] and [param layer_count] determine which layers of a 3D texture (or texture array) are affected by this clear operation. For 2D textures (which only have one layer by design), [param base_layer] and [param layer_count] must both be [code]0[/code].
[b]Note:[/b] [param texture] can't be cleared while a draw list that uses it as part of a framebuffer is being created. Ensure the draw list is finalized (and that the color/depth texture using it is not set to [constant FINAL_ACTION_CONTINUE]) to clear this texture.
@@ -699,7 +699,7 @@
<param index="6" name="dst_mipmap" type="int" />
<param index="7" name="src_layer" type="int" />
<param index="8" name="dst_layer" type="int" />
<param index="9" name="post_barrier" type="int" enum="RenderingDevice.BarrierMask" is_bitfield="true" default="7" />
<param index="9" name="post_barrier" type="int" enum="RenderingDevice.BarrierMask" is_bitfield="true" default="15" />
<description>
Copies the [param from_texture] to [param to_texture] with the specified [param from_pos], [param to_pos] and [param size] coordinates. The Z axis of the [param from_pos], [param to_pos] and [param size] must be [code]0[/code] for 2-dimensional textures. Source and destination mipmaps/layers must also be specified, with these parameters being [code]0[/code] for textures without mipmaps or single-layer textures. Returns [constant @GlobalScope.OK] if the texture copy was successful or [constant @GlobalScope.ERR_INVALID_PARAMETER] otherwise.
[b]Note:[/b] [param from_texture] texture can't be copied while a draw list that uses it as part of a framebuffer is being created. Ensure the draw list is finalized (and that the color/depth texture using it is not set to [constant FINAL_ACTION_CONTINUE]) to copy this texture.
@@ -786,7 +786,7 @@
<return type="int" enum="Error" />
<param index="0" name="from_texture" type="RID" />
<param index="1" name="to_texture" type="RID" />
<param index="2" name="post_barrier" type="int" enum="RenderingDevice.BarrierMask" is_bitfield="true" default="7" />
<param index="2" name="post_barrier" type="int" enum="RenderingDevice.BarrierMask" is_bitfield="true" default="15" />
<description>
Resolves the [param from_texture] texture onto [param to_texture] with multisample antialiasing enabled. This must be used when rendering a framebuffer for MSAA to work. Returns [constant @GlobalScope.OK] if successful, [constant @GlobalScope.ERR_INVALID_PARAMETER] otherwise.
[b]Note:[/b] [param from_texture] and [param to_texture] textures must have the same dimension, format and type (color or depth).
@@ -803,7 +803,7 @@
<param index="0" name="texture" type="RID" />
<param index="1" name="layer" type="int" />
<param index="2" name="data" type="PackedByteArray" />
<param index="3" name="post_barrier" type="int" enum="RenderingDevice.BarrierMask" is_bitfield="true" default="7" />
<param index="3" name="post_barrier" type="int" enum="RenderingDevice.BarrierMask" is_bitfield="true" default="15" />
<description>
Updates texture data with new data, replacing the previous data in place. The updated texture data must have the same dimensions and format. For 2D textures (which only have one layer), [param layer] must be [code]0[/code]. Returns [constant @GlobalScope.OK] if the update was successful, [constant @GlobalScope.ERR_INVALID_PARAMETER] otherwise.
[b]Note:[/b] Updating textures is forbidden during creation of a draw or compute list.
@@ -1580,19 +1580,25 @@
<constant name="DATA_FORMAT_MAX" value="218" enum="DataFormat">
Represents the size of the [enum DataFormat] enum.
</constant>
<constant name="BARRIER_MASK_RASTER" value="1" enum="BarrierMask" is_bitfield="true">
Raster barrier mask.
<constant name="BARRIER_MASK_VERTEX" value="1" enum="BarrierMask" is_bitfield="true">
Vertex shader barrier mask.
</constant>
<constant name="BARRIER_MASK_COMPUTE" value="2" enum="BarrierMask" is_bitfield="true">
<constant name="BARRIER_MASK_FRAGMENT" value="2" enum="BarrierMask" is_bitfield="true">
Fragment shader barrier mask.
</constant>
<constant name="BARRIER_MASK_COMPUTE" value="4" enum="BarrierMask" is_bitfield="true">
Compute barrier mask.
</constant>
<constant name="BARRIER_MASK_TRANSFER" value="4" enum="BarrierMask" is_bitfield="true">
<constant name="BARRIER_MASK_TRANSFER" value="8" enum="BarrierMask" is_bitfield="true">
Transfer barrier mask.
</constant>
<constant name="BARRIER_MASK_ALL_BARRIERS" value="7" enum="BarrierMask" is_bitfield="true">
<constant name="BARRIER_MASK_RASTER" value="3" enum="BarrierMask" is_bitfield="true">
Raster barrier mask (vertex and fragment). Equivalent to [code]BARRIER_MASK_VERTEX | BARRIER_MASK_FRAGMENT[/code].
</constant>
<constant name="BARRIER_MASK_ALL_BARRIERS" value="15" enum="BarrierMask" is_bitfield="true">
Barrier mask for all types (raster, compute, transfer). Equivalent to [code]BARRIER_MASK_RASTER | BARRIER_MASK_COMPUTE | BARRIER_MASK_TRANSFER[/code].
</constant>
<constant name="BARRIER_MASK_NO_BARRIER" value="8" enum="BarrierMask" is_bitfield="true">
<constant name="BARRIER_MASK_NO_BARRIER" value="16" enum="BarrierMask" is_bitfield="true">
No barrier for any type.
</constant>
<constant name="TEXTURE_TYPE_1D" value="0" enum="TextureType">
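To summarize the documentation changes above: the old single raster bit is split in two, and every other bit shifts up to make room. A minimal standalone C++ sketch of the reworked layout (illustrative only — the names match the documented constants, but this is not the engine's actual declaration):

#include <cstdint>

// Reworked BarrierMask layout, as documented in RenderingDevice.xml above.
enum BarrierMask : uint32_t {
	BARRIER_MASK_VERTEX = 1,   // new; previously half of BARRIER_MASK_RASTER
	BARRIER_MASK_FRAGMENT = 2, // new; previously half of BARRIER_MASK_RASTER
	BARRIER_MASK_COMPUTE = 4,  // was 2
	BARRIER_MASK_TRANSFER = 8, // was 4
	BARRIER_MASK_RASTER = BARRIER_MASK_VERTEX | BARRIER_MASK_FRAGMENT, // 3, kept for compatibility
	BARRIER_MASK_ALL_BARRIERS = 15, // was 7
	BARRIER_MASK_NO_BARRIER = 16,   // was 8
};

Keeping BARRIER_MASK_RASTER defined as the OR of the two new bits means existing callers that pass it keep their old behavior, while new code can target just one of the stages.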
89 changes: 66 additions & 23 deletions drivers/vulkan/rendering_device_vulkan.cpp
@@ -54,9 +54,13 @@ RenderingDeviceVulkan::Buffer *RenderingDeviceVulkan::_get_buffer_from_owner(RID
r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
r_access_mask |= VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
if (buffer->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) {
if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) {
if (p_post_barrier.has_flag(BARRIER_MASK_VERTEX)) {
r_access_mask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT;
}
if (p_post_barrier.has_flag(BARRIER_MASK_FRAGMENT)) {
r_access_mask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
r_stage_mask |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
}
if (p_post_barrier.has_flag(BARRIER_MASK_COMPUTE)) {
r_access_mask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
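The hunks in this file all apply the same transformation; a condensed, self-contained sketch of the per-stage mapping (the helper name add_shader_stages is hypothetical, the Vulkan flags are real) may make the repeated pattern easier to scan:

#include <cstdint>
#include <vulkan/vulkan.h>

// Mask values as sketched earlier (illustrative, not the engine header).
enum : uint32_t {
	BARRIER_MASK_VERTEX = 1,
	BARRIER_MASK_FRAGMENT = 2,
	BARRIER_MASK_COMPUTE = 4,
};

// Each requested bit now contributes only its own pipeline stage; the old
// BARRIER_MASK_RASTER check added the vertex and fragment stages together.
static void add_shader_stages(uint32_t p_post_barrier,
		VkPipelineStageFlags &r_stage_mask, VkAccessFlags &r_access_mask) {
	if (p_post_barrier & BARRIER_MASK_VERTEX) {
		r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT;
		r_access_mask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
	}
	if (p_post_barrier & BARRIER_MASK_FRAGMENT) {
		r_stage_mask |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
		r_access_mask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
	}
	if (p_post_barrier & BARRIER_MASK_COMPUTE) {
		r_stage_mask |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
		r_access_mask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
	}
}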
@@ -68,17 +72,24 @@ RenderingDeviceVulkan::Buffer *RenderingDeviceVulkan::_get_buffer_from_owner(RID
r_access_mask |= VK_ACCESS_INDEX_READ_BIT;
buffer = index_buffer_owner.get_or_null(p_buffer);
} else if (uniform_buffer_owner.owns(p_buffer)) {
if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) {
r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
if (p_post_barrier.has_flag(BARRIER_MASK_VERTEX)) {
r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT;
}
if (p_post_barrier.has_flag(BARRIER_MASK_FRAGMENT)) {
r_stage_mask |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
}
if (p_post_barrier.has_flag(BARRIER_MASK_COMPUTE)) {
r_stage_mask |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
}
r_access_mask |= VK_ACCESS_UNIFORM_READ_BIT;
buffer = uniform_buffer_owner.get_or_null(p_buffer);
} else if (texture_buffer_owner.owns(p_buffer)) {
if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) {
r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
if (p_post_barrier.has_flag(BARRIER_MASK_VERTEX)) {
r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT;
r_access_mask |= VK_ACCESS_SHADER_READ_BIT;
}
if (p_post_barrier.has_flag(BARRIER_MASK_FRAGMENT)) {
r_stage_mask |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
r_access_mask |= VK_ACCESS_SHADER_READ_BIT;
}
if (p_post_barrier.has_flag(BARRIER_MASK_COMPUTE)) {
@@ -89,8 +100,12 @@ RenderingDeviceVulkan::Buffer *RenderingDeviceVulkan::_get_buffer_from_owner(RID
buffer = &texture_buffer_owner.get_or_null(p_buffer)->buffer;
} else if (storage_buffer_owner.owns(p_buffer)) {
buffer = storage_buffer_owner.get_or_null(p_buffer);
if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) {
r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
if (p_post_barrier.has_flag(BARRIER_MASK_VERTEX)) {
r_stage_mask |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT;
r_access_mask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
}
if (p_post_barrier.has_flag(BARRIER_MASK_FRAGMENT)) {
r_stage_mask |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
r_access_mask |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
}
if (p_post_barrier.has_flag(BARRIER_MASK_COMPUTE)) {
@@ -2625,8 +2640,12 @@ Error RenderingDeviceVulkan::_texture_update(RID p_texture, uint32_t p_layer, co
barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
}
if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) {
barrier_flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
if (p_post_barrier.has_flag(BARRIER_MASK_VERTEX)) {
barrier_flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT;
access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
}
if (p_post_barrier.has_flag(BARRIER_MASK_FRAGMENT)) {
barrier_flags |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
}
if (p_post_barrier.has_flag(BARRIER_MASK_TRANSFER)) {
@@ -3020,8 +3039,12 @@ Error RenderingDeviceVulkan::texture_copy(RID p_from_texture, RID p_to_texture,
barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
}
if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) {
barrier_flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
if (p_post_barrier.has_flag(BARRIER_MASK_VERTEX)) {
barrier_flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT;
access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
}
if (p_post_barrier.has_flag(BARRIER_MASK_FRAGMENT)) {
barrier_flags |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
}
if (p_post_barrier.has_flag(BARRIER_MASK_TRANSFER)) {
@@ -3198,8 +3221,12 @@ Error RenderingDeviceVulkan::texture_resolve_multisample(RID p_from_texture, RID
barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
}
if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) {
barrier_flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
if (p_post_barrier.has_flag(BARRIER_MASK_VERTEX)) {
barrier_flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT;
access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
}
if (p_post_barrier.has_flag(BARRIER_MASK_FRAGMENT)) {
barrier_flags |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
}
if (p_post_barrier.has_flag(BARRIER_MASK_TRANSFER)) {
@@ -3334,8 +3361,12 @@ Error RenderingDeviceVulkan::texture_clear(RID p_texture, const Color &p_color,
barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
}
if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) {
barrier_flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
if (p_post_barrier.has_flag(BARRIER_MASK_VERTEX)) {
barrier_flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT;
access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
}
if (p_post_barrier.has_flag(BARRIER_MASK_FRAGMENT)) {
barrier_flags |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
}
if (p_post_barrier.has_flag(BARRIER_MASK_TRANSFER)) {
@@ -7651,10 +7682,14 @@ void RenderingDeviceVulkan::draw_list_end(BitField<BarrierMask> p_post_barrier)
barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
}
if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) {
barrier_flags |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT /*| VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT*/;
if (p_post_barrier.has_flag(BARRIER_MASK_VERTEX)) {
barrier_flags |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT /*| VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT*/;
access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_INDEX_READ_BIT | VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT /*| VK_ACCESS_INDIRECT_COMMAND_READ_BIT*/;
}
if (p_post_barrier.has_flag(BARRIER_MASK_FRAGMENT)) {
barrier_flags |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT /*| VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT*/;
access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT /*| VK_ACCESS_INDIRECT_COMMAND_READ_BIT*/;
}
if (p_post_barrier.has_flag(BARRIER_MASK_TRANSFER)) {
barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT;
access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT;
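The practical payoff of the split shows up at the end of a pass. A hedged usage sketch (engine-side C++; rd is assumed to be a RenderingDevice pointer and RD its usual engine alias, with list setup and draw calls elided):

// The only consumer of this raster pass is a later compute pass, so the
// post-barrier can target the compute stage alone instead of all stages:
rd->draw_list_end(RD::BARRIER_MASK_COMPUTE);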
@@ -8199,10 +8234,14 @@ void RenderingDeviceVulkan::compute_list_end(BitField<BarrierMask> p_post_barrier)
barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
}
if (p_post_barrier.has_flag(BARRIER_MASK_RASTER)) {
barrier_flags |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
if (p_post_barrier.has_flag(BARRIER_MASK_VERTEX)) {
barrier_flags |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_INDEX_READ_BIT | VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT | VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
}
if (p_post_barrier.has_flag(BARRIER_MASK_FRAGMENT)) {
barrier_flags |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
}
if (p_post_barrier.has_flag(BARRIER_MASK_TRANSFER)) {
barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT;
access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT;
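The symmetric case for compute, again as a hedged sketch under the same assumptions as above:

// This compute pass writes a texture that only fragment shaders sample,
// so vertex work after the barrier is free to start immediately. Before
// this change the narrowest option, BARRIER_MASK_RASTER, also blocked
// the vertex stage:
rd->compute_list_end(RD::BARRIER_MASK_FRAGMENT);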
@@ -8227,7 +8266,7 @@ void RenderingDeviceVulkan::barrier(BitField<BarrierMask> p_from, BitField<BarrierMask> p_to)
src_barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
src_access_flags |= VK_ACCESS_SHADER_WRITE_BIT;
}
if (p_from.has_flag(BARRIER_MASK_RASTER)) {
if (p_from.has_flag(BARRIER_MASK_FRAGMENT)) {
src_barrier_flags |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
src_access_flags |= VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
}
@@ -8247,10 +8286,14 @@
dst_barrier_flags |= VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
dst_access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
}
if (p_to.has_flag(BARRIER_MASK_RASTER)) {
dst_barrier_flags |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
if (p_to.has_flag(BARRIER_MASK_VERTEX)) {
dst_barrier_flags |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
dst_access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_INDEX_READ_BIT | VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT | VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
}
if (p_to.has_flag(BARRIER_MASK_FRAGMENT)) {
dst_barrier_flags |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
dst_access_flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
}
if (p_to.has_flag(BARRIER_MASK_TRANSFER)) {
dst_barrier_flags |= VK_PIPELINE_STAGE_TRANSFER_BIT;
dst_access_flags |= VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT;
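Note the asymmetry in the barrier() hunks above: on the source side only the fragment bit is checked, since attachment writes (color output and late depth/stencil tests) happen in fragment-adjacent stages and a vertex-only source has nothing to flush there. A final hedged usage sketch, same assumptions as before:

// Flush fragment-stage attachment writes and make them visible to
// compute-shader reads:
rd->barrier(RD::BARRIER_MASK_FRAGMENT, RD::BARRIER_MASK_COMPUTE);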